Dataset schema. One line per column: name, dtype, and the min/max shown by the viewer (for string columns, min and max are string lengths):

    column                      dtype     min   max
    id                          int64     0     843k
    repository_name             string    7     55
    file_path                   string    9     332
    class_name                  string    3     290
    human_written_code          string    12    4.36M
    class_skeleton              string    19    2.2M
    total_program_units         int64     1     9.57k
    total_doc_str               int64     0     4.2k
    AvgCountLine                float64   0     7.89k
    AvgCountLineBlank           float64   0     300
    AvgCountLineCode            float64   0     7.89k
    AvgCountLineComment         float64   0     7.89k
    AvgCyclomatic               float64   0     130
    CommentToCodeRatio          float64   0     176
    CountClassBase              float64   0     48
    CountClassCoupled           float64   0     589
    CountClassCoupledModified   float64   0     581
    CountClassDerived           float64   0     5.37k
    CountDeclInstanceMethod     float64   0     4.2k
    CountDeclInstanceVariable   float64   0     299
    CountDeclMethod             float64   0     4.2k
    CountDeclMethodAll          float64   0     4.2k
    CountLine                   float64   1     115k
    CountLineBlank              float64   0     9.01k
    CountLineCode               float64   0     94.4k
    CountLineCodeDecl           float64   0     46.1k
    CountLineCodeExe            float64   0     91.3k
    CountLineComment            float64   0     27k
    CountStmt                   float64   1     93.2k
    CountStmtDecl               float64   0     46.1k
    CountStmtExe                float64   0     90.2k
    MaxCyclomatic               float64   0     759
    MaxInheritanceTree          float64   0     16
    MaxNesting                  float64   0     34
    SumCyclomatic               float64   0     6k
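The row records below are easier to query programmatically than to read linearly. As a minimal sketch (assuming the dump has been exported to a Parquet file; the file name classes.parquet is hypothetical and not part of this dump), the schema above maps directly onto a pandas DataFrame:

    import pandas as pd

    # Hypothetical export of this dump; the file name is an assumption.
    df = pd.read_parquet("classes.parquet")

    # The string columns hold entire class bodies, so inspect lengths
    # rather than printing them wholesale.
    print(df["human_written_code"].str.len().describe())

    # Example query over the metric columns from the schema: classes
    # that carry docstrings and non-trivial cyclomatic complexity.
    subset = df[(df["total_doc_str"] > 0) & (df["SumCyclomatic"] >= 10)]
    print(subset[["class_name", "CountLineCode", "SumCyclomatic"]].head())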
id: 4,200
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/buffer.py
class_name: turgles.buffer.ChunkBuffer
human_written_code:

    class ChunkBuffer(object):
        """A resizable cffi-based buffer that provides data indexed in chunks"""

        def __init__(self, size, chunk_size, ctype='float'):
            """Create a new buffer of n chunks.

            Parameters:
                size: number of chunks
                chunk_size: size of each chunk
                ctype: string of the C type to use (defaults to float)
            """
            self.count = 0  # current number of chunks
            self.size = size  # max number of chunks
            self.chunk_size = chunk_size  # size of chunks
            self.ctype = ctype
            self.data = self._allocate(size)
            self.ctype_size = sizeof(self.data[0:1])

        def _allocate(self, size):
            return ffi.new('{}[{}]'.format(self.ctype, size * self.chunk_size))

        @property
        def byte_size(self):
            return self.count * self.chunk_size * self.ctype_size

        def __iter__(self):
            """Iterates over chunks"""
            chunk_size = self.chunk_size
            for i in range(self.count):
                offset = i * chunk_size
                yield self.data[offset:offset + chunk_size]

        def slice(self, size):
            slice = self.chunk_size * size
            data_size = len(self.data)
            num_slices, last_slice = divmod(data_size, slice)
            last_slice_index = num_slices * slice
            for i in range(0, last_slice_index, slice):
                yield size, self.data[i:i + slice]
            if last_slice:
                # last <size remainder
                assert last_slice_index % self.chunk_size == 0
                remainder = data_size - last_slice_index
                yield (remainder // self.chunk_size,
                       self.data[last_slice_index:data_size])

        def get(self, index):
            """Get a chunk by index"""
            assert index <= self.count
            assert index < self.size
            offset = index * self.chunk_size
            return self.data[offset:offset + self.chunk_size]

        def new(self, init=None):
            """Return the last currently unused chunk, resizing if needed.

            If init is passed, chunk will be initialised to that data"""
            if self.count >= self.size:
                self.resize(self.count * 2)
            chunk = self.get(self.count)
            if init is not None:
                assert len(init) == self.chunk_size
                chunk[0:self.chunk_size] = init
            self.count += 1
            return chunk

        def resize(self, new_size):
            """Create a new larger array, and copy data over"""
            assert new_size > self.size
            new_data = self._allocate(new_size)
            # copy
            new_data[0:self.size * self.chunk_size] = self.data
            self.size = new_size
            self.data = new_data

        def remove(self, index):
            """Remove chunk at index.

            Doesn't actually delete data, copies last chunk's data over data
            to be removed, and decreases the count"""
            assert index < self.count
            last_index = self.count - 1
            data = self.get(index)
            if index == last_index:
                # easy case - nothing to do except zero last chunk
                last_data = data
                moved = None
            else:
                last_data = self.get(last_index)
                # copy the last chunk's data over the data to be deleted
                data[0:self.chunk_size] = last_data
                moved = last_index
            # zero last chunk's data
            last_data[0:self.chunk_size] = [0] * self.chunk_size
            self.count -= 1
            # provide which index has now moved
            return moved

class_skeleton:

    class ChunkBuffer(object):
        '''A resizable cffi-based buffer that provides data indexed in chunks'''
        def __init__(self, size, chunk_size, ctype='float'):
            '''Create a new buffer of n chunks.

            Parameters:
                size: number of chunks
                chunk_size: size of each chunk
                ctype: string of the C type to use (defaults to float)
            '''
            pass
        def _allocate(self, size):
            pass
        @property
        def byte_size(self):
            pass
        def __iter__(self):
            '''Iterates over chunks'''
            pass
        def slice(self, size):
            pass
        def get(self, index):
            '''Get a chunk by index'''
            pass
        def new(self, init=None):
            '''Return the last currently unused chunk, resizing if needed.

            If init is passed, chunk will be initialised to that data'''
            pass
        def resize(self, new_size):
            '''Create a new larger array, and copy data over'''
            pass
        def remove(self, index):
            '''Remove chunk at index.

            Doesn't actually delete data, copies last chunk's data over data
            to be removed, and decreases the count'''
            pass

metrics: total_program_units=11, total_doc_str=7, AvgCountLine=10, AvgCountLineBlank=1,
    AvgCountLineCode=7, AvgCountLineComment=3, AvgCyclomatic=2, CommentToCodeRatio=0.38,
    CountClassBase=1, CountClassCoupled=1, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=9, CountDeclInstanceVariable=6, CountDeclMethod=9, CountDeclMethodAll=9,
    CountLine=100, CountLineBlank=15, CountLineCode=64, CountLineCodeDecl=33, CountLineCodeExe=53,
    CountLineComment=24, CountStmt=61, CountStmtDecl=32, CountStmtExe=51, MaxCyclomatic=3,
    MaxInheritanceTree=1, MaxNesting=1, SumCyclomatic=15
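Each record pairs human_written_code with a class_skeleton in which method bodies collapse to their docstring plus pass. The dump does not include the tool that produced the skeletons; the sketch below only approximates that relationship with Python's ast module (it keeps class-level assignments that the real skeletons drop, so treat it as illustrative, not as the dataset's extractor):

    import ast
    import textwrap

    def skeletonize(source):
        # Replace every function body with its docstring (if any) plus
        # `pass`, approximating the class_skeleton column.
        tree = ast.parse(textwrap.dedent(source))
        for node in ast.walk(tree):
            if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                body = []
                doc = ast.get_docstring(node, clean=False)
                if doc is not None:
                    body.append(ast.Expr(ast.Constant(doc)))
                body.append(ast.Pass())
                node.body = body
        return ast.unparse(tree)  # ast.unparse requires Python 3.9+

    print(skeletonize(
        "class A:\n"
        "    def f(self):\n"
        "        'docstring'\n"
        "        return 1\n"
    ))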
id: 4,201
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/buffer.py
class_name: turgles.buffer.ShapeBuffer
human_written_code:

    class ShapeBuffer(object):
        """A pair of chunked buffers of data.

        One is the model buffer, layed out like NinjaTurtle. The other is the
        color buffer, which is just used in Turgles
        """

        def __init__(self, shape, size):
            self.shape = shape
            self.model = ChunkBuffer(size, TURTLE_MODEL_DATA_SIZE)
            self.color = ChunkBuffer(size, TURTLE_COLOR_DATA_SIZE)
            self.id_to_index = {}
            self.index_to_id = {}

        @property
        def count(self):
            count = self.model.count
            assert count == self.color.count
            return count

        @property
        def size(self):
            size = self.model.size
            assert size == self.color.size
            return size

        def __iter__(self):
            for model, color in zip(self.model, self.color):
                yield model, color

        def slice(self, size):
            model_iter = self.model.slice(size)
            color_iter = self.color.slice(size)
            while 1:
                msize, model = next(model_iter)
                csize, color = next(color_iter)
                assert msize == csize
                yield msize, model, color

        def _update_id_map(self, id, index):
            self.id_to_index[id] = index
            self.index_to_id[index] = id

        def get_model(self, id):
            return self.model.get(self.id_to_index[id])

        def get_color(self, id):
            return self.color.get(self.id_to_index[id])

        def get(self, id):
            index = self.id_to_index[id]
            return self.model.get(index), self.color.get(index)

        def new(self, id, model_init=None, color_init=None):
            assert id not in self.id_to_index
            # cache the current count
            count = self.model.count
            model_data = self.model.new(model_init)
            color_data = self.color.new(color_init)
            self._update_id_map(id, count)
            return model_data, color_data

        def remove(self, id):
            index = self.id_to_index[id]
            moved_model_index = self.model.remove(index)
            moved_color_index = self.color.remove(index)
            assert moved_model_index == moved_color_index
            if moved_model_index:
                # update id map for the last turtle to new location
                moved_id = self.index_to_id[moved_model_index]
                self._update_id_map(moved_id, index)
                del self.index_to_id[moved_model_index]
            else:
                del self.index_to_id[index]
            del self.id_to_index[id]

class_skeleton:

    class ShapeBuffer(object):
        '''A pair of chunked buffers of data.

        One is the model buffer, layed out like NinjaTurtle. The other is the
        color buffer, which is just used in Turgles
        '''
        def __init__(self, shape, size):
            pass
        @property
        def count(self):
            pass
        @property
        def size(self):
            pass
        def __iter__(self):
            pass
        def slice(self, size):
            pass
        def _update_id_map(self, id, index):
            pass
        def get_model(self, id):
            pass
        def get_color(self, id):
            pass
        def get_model(self, id):
            pass
        def new(self, id, model_init=None, color_init=None):
            pass
        def remove(self, id):
            pass

metrics: total_program_units=14, total_doc_str=1, AvgCountLine=5, AvgCountLineBlank=0,
    AvgCountLineCode=5, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0.11,
    CountClassBase=1, CountClassCoupled=2, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=11, CountDeclInstanceVariable=5, CountDeclMethod=11, CountDeclMethodAll=11,
    CountLine=78, CountLineBlank=15, CountLineCode=57, CountLineCodeDecl=34, CountLineCodeExe=43,
    CountLineComment=6, CountStmt=54, CountStmtDecl=32, CountStmtExe=42, MaxCyclomatic=2,
    MaxInheritanceTree=1, MaxNesting=1, SumCyclomatic=14
id: 4,202
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/demo.py
class_name: turgles.demo.Model
human_written_code:

    class Model(object):
        pass

class_skeleton:

    class Model(object):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0,
    CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=1, CountLineCodeExe=1,
    CountLineComment=0, CountStmt=2, CountStmtDecl=1, CountStmtExe=1, MaxCyclomatic=0,
    MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=0
id: 4,203
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/es_renderer.py
class_name: turgles.es_renderer.ES2Renderer
human_written_code:

    class ES2Renderer(Renderer):
        vertex_shader = pkg_resources.resource_string(
            'turgles', 'shaders/turtles_es.vert').decode('utf8')
        fragment_shader = pkg_resources.resource_string(
            'turgles', 'shaders/turtles.frag').decode('utf8')

        def setup_vaos(self):
            self.program.bind()
            self.vao = {}
            for shape, geom in SHAPES.items():
                self.vao[shape] = ESTurtleShapeRenderer(shape, self.program, geom)

class_skeleton:

    class ES2Renderer(Renderer):
        def setup_vaos(self):
            pass

metrics: total_program_units=2, total_doc_str=0, AvgCountLine=5, AvgCountLineBlank=0,
    AvgCountLineCode=5, AvgCountLineComment=0, AvgCyclomatic=2, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=1, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=1, CountDeclInstanceVariable=1, CountDeclMethod=1, CountDeclMethodAll=12,
    CountLine=12, CountLineBlank=2, CountLineCode=10, CountLineCodeDecl=6, CountLineCodeExe=8,
    CountLineComment=0, CountStmt=8, CountStmtDecl=6, CountStmtExe=6, MaxCyclomatic=2,
    MaxInheritanceTree=2, MaxNesting=1, SumCyclomatic=2
id: 4,204
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/es_renderer.py
class_name: turgles.es_renderer.ESTurtleShapeRenderer
human_written_code:

    class ESTurtleShapeRenderer(object):
        """A Renderer for rendering mutliple versions of a specific turtle shape.

        Creates vertex/index/model arrays, and can render them given turtle
        data."""

        def __init__(self, name, program, geometry):
            self.name = name
            self.program = program
            self.geometry = geometry
            # size of batched draw calls
            self.batch = BATCH_SIZE

            self.vertex_attr = glGetAttribLocation(self.program.id, b"vertex")
            self.edge_attr = glGetAttribLocation(self.program.id, b"edge")
            self.index_attr = glGetAttribLocation(self.program.id, b"index")

            # load/bind/configure vertex buffer
            self.vertex_buffer = VertexBuffer(GLfloat, GL_STATIC_DRAW)
            batched_edges = list(geometry.edges) * self.batch
            self.vertex_buffer.load(memory.create_vertex_buffer(batched_edges))
            self.vertex_buffer.partition(
                [(self.vertex_attr, 4), (self.edge_attr, 3)]
            )

            uniform_indicies = []
            for i in range(self.batch):
                uniform_indicies.extend([i] * geometry.num_vertex)
            indices_buffer = memory.create_vertex_buffer(uniform_indicies)
            self.indices_buffer = VertexBuffer(GLfloat, GL_STATIC_DRAW)
            self.indices_buffer.load(indices_buffer)
            self.indices_buffer.set(self.index_attr, 1)

        def render(self, model, color, num_turtles):
            self.program.bind()
            # no VAOs so have to set manually
            self.vertex_buffer.partition(
                [(self.vertex_attr, 4), (self.edge_attr, 3)]
            )
            self.indices_buffer.set(self.index_attr, 1)
            model_uniform = self.program.uniforms['turtle_model_array[0]']
            color_uniform = self.program.uniforms['turtle_color_array[0]']
            model_iter = model.slice(self.batch)
            color_iter = color.slice(self.batch)
            slices = zip(model_iter, color_iter)
            with measure("loop"):
                for (msize, model_slice), (csize, color_slice) in slices:
                    assert msize == csize
                    # load batch of turtle data
                    with measure('load'):
                        model_uniform.set(model_slice, size=msize)
                        color_uniform.set(color_slice, size=msize)
                    with measure('draw'):
                        glDrawArrays(
                            GL_TRIANGLES,
                            0,
                            len(self.geometry.edges) // 7 * msize,
                        )
            self.vertex_buffer.unbind()
            self.program.unbind()

class_skeleton:

    class ESTurtleShapeRenderer(object):
        '''A Renderer for rendering mutliple versions of a specific turtle shape.

        Creates vertex/index/model arrays, and can render them given turtle
        data.'''
        def __init__(self, name, program, geometry):
            pass
        def render(self, model, color, num_turtles):
            pass

metrics: total_program_units=3, total_doc_str=1, AvgCountLine=30, AvgCountLineBlank=5,
    AvgCountLineCode=23, AvgCountLineComment=2, AvgCyclomatic=2, CommentToCodeRatio=0.15,
    CountClassBase=1, CountClassCoupled=5, CountClassCoupledModified=2, CountClassDerived=0,
    CountDeclInstanceMethod=2, CountDeclInstanceVariable=9, CountDeclMethod=2, CountDeclMethodAll=2,
    CountLine=67, CountLineBlank=13, CountLineCode=47, CountLineCodeDecl=22, CountLineCodeExe=44,
    CountLineComment=7, CountStmt=39, CountStmtDecl=22, CountStmtExe=36, MaxCyclomatic=2,
    MaxInheritanceTree=1, MaxNesting=3, SumCyclomatic=4
id: 4,205
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/gl/buffer.py
class_name: turgles.gl.buffer.VertexBuffer
human_written_code:

    class VertexBuffer(Buffer):
        """A VBO object to store vertex/model data.

        Specialisation of Buffer for attribute data, provides convient way to
        use glVertexAttribPointer, via set() and partition().
        """

        def __init__(self, element_type, draw_type):
            super(VertexBuffer, self).__init__(
                GL_ARRAY_BUFFER, element_type, draw_type)

        def set(self, index, size, interpolate=GL_FALSE, stride=0, offset=0,
                divisor=None):
            self.bind()
            glEnableVertexAttribArray(index)
            glVertexAttribPointer(
                index, size, self.element_flag, interpolate, stride, offset
            )
            if divisor is not None:
                glVertexAttribDivisor(index, divisor)

        def partition(self, args, **kwargs):
            kwargs['stride'] = sum(a[1] for a in args) * self.element_size
            offset = 0
            for attr, size in args:
                self.set(attr, size, offset=offset, **kwargs)
                offset += size * self.element_size

class_skeleton:

    class VertexBuffer(Buffer):
        '''A VBO object to store vertex/model data.

        Specialisation of Buffer for attribute data, provides convient way to
        use glVertexAttribPointer, via set() and partition().
        '''
        def __init__(self, element_type, draw_type):
            pass
        def set(self, index, size, interpolate=GL_FALSE, stride=0, offset=0,
                divisor=None):
            pass
        def partition(self, args, **kwargs):
            pass

metrics: total_program_units=4, total_doc_str=1, AvgCountLine=9, AvgCountLineBlank=0,
    AvgCountLineCode=9, AvgCountLineComment=0, AvgCyclomatic=2, CommentToCodeRatio=0.14,
    CountClassBase=1, CountClassCoupled=1, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=3, CountDeclInstanceVariable=0, CountDeclMethod=3, CountDeclMethodAll=7,
    CountLine=37, CountLineBlank=4, CountLineCode=29, CountLineCodeDecl=12, CountLineCodeExe=19,
    CountLineComment=4, CountStmt=15, CountStmtDecl=6, CountStmtExe=11, MaxCyclomatic=2,
    MaxInheritanceTree=2, MaxNesting=1, SumCyclomatic=5
id: 4,206
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/render/turtles.py
class_name: turgles.render.turtles.TurtleShapeVAO
human_written_code:

    class TurtleShapeVAO(object):
        """A VAO for rendering mutliple versions of a specific turtle shape.

        Creates VAO/vertex/index/model arrays, and can render them given
        turtle data."""

        def __init__(self, name, program, geometry):
            self.name = name
            self.program = program
            self.geometry = geometry

            self.vertex_attr = glGetAttribLocation(self.program.id, b"vertex")
            self.edge_attr = glGetAttribLocation(self.program.id, b"edge")
            self.model_attr = glGetAttribLocation(self.program.id, b"turtle_model")
            self.color_attr = glGetAttribLocation(self.program.id, b"turtle_color")

            # create VAO to store Vertex attribute state for later
            self.vao = GLuint()
            glGenVertexArrays(1, self.vao)

            # bind VAO to record array setup/state
            glBindVertexArray(self.vao)

            # load shape data into vertex buffer
            self.vertex_buffer = VertexBuffer(GLfloat, GL_STATIC_DRAW)
            self.vertex_buffer.load(geometry.edges)
            self.vertex_buffer.partition(
                [(self.vertex_attr, 4), (self.edge_attr, 3)]
            )

            # allocate/configure instanced buffers
            # turtle model buffer
            self.model_buffer = VertexBuffer(GLfloat, GL_STREAM_DRAW)
            # mat4 is 4 sequential locations
            array = [
                (self.model_attr, 4),
                (self.model_attr + 1, 4),
                (self.model_attr + 2, 4),
                (self.model_attr + 3, 4),
            ]
            self.model_buffer.partition(array, divisor=1)

            # turtle color buffer
            self.color_buffer = VertexBuffer(GLfloat, GL_STREAM_DRAW)
            # mat3 is 3 sequential locations
            array = [
                (self.color_attr, 3),
                (self.color_attr + 1, 3),
                (self.color_attr + 2, 3),
            ]
            self.color_buffer.partition(array, divisor=1)

            # VAO now configured, so unbind
            glBindVertexArray(0)

        def render(self, model, color, num_turtles):
            """Renders all turtles of a given shape"""
            self.program.bind()
            glBindVertexArray(self.vao)
            self.model_buffer.load(model.data, model.byte_size)
            self.color_buffer.load(color.data, color.byte_size)
            glDrawArraysInstanced(
                GL_TRIANGLES,
                0,
                len(self.geometry.edges) // 7,  # 7 = 4 for vertex, 3 for edge
                num_turtles
            )
            glBindVertexArray(0)
            self.program.unbind()

class_skeleton:

    class TurtleShapeVAO(object):
        '''A VAO for rendering mutliple versions of a specific turtle shape.

        Creates VAO/vertex/index/model arrays, and can render them given
        turtle data.'''
        def __init__(self, name, program, geometry):
            pass
        def render(self, model, color, num_turtles):
            '''Renders all turtles of a given shape'''
            pass

metrics: total_program_units=3, total_doc_str=2, AvgCountLine=33, AvgCountLineBlank=5,
    AvgCountLineCode=23, AvgCountLineComment=6, AvgCyclomatic=1, CommentToCodeRatio=0.3,
    CountClassBase=1, CountClassCoupled=1, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=2, CountDeclInstanceVariable=11, CountDeclMethod=2, CountDeclMethodAll=2,
    CountLine=72, CountLineBlank=13, CountLineCode=46, CountLineCodeDecl=15, CountLineCodeExe=43,
    CountLineComment=14, CountStmt=30, CountStmtDecl=15, CountStmtExe=27, MaxCyclomatic=1,
    MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=2
id: 4,207
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/geometry.py
class_name: turgles.geometry.TurtleGeometry
human_written_code:

    class TurtleGeometry(object):
        """Manages the mesh for a turtle.

        Uses cffi to create c-arrays for storing vertices, indexes, and
        normals.
        """

        def __init__(self, scale, vertices, indices, exclude):
            self.scale = scale
            self.vertices = create_vertex_buffer(vertices)
            self.indices = create_index_buffer(indices)
            self.num_vertex = len(indices)
            self.edges = self.calculate_edges(exclude)

        def calculate_edges(self, excludes):
            """Builds a vertex list adding barycentric coordinates to each vertex.

            Used to draw turtle borders efficiently, specialised to draw only
            the some edges. See below for references.

            http://stackoverflow.com/questions/18035719/drawing-a-border-on-a-2d-polygon-with-a-fragment-shader  # NOQA
            http://codeflow.org/entries/2012/aug/02/easy-wireframe-display-with-barycentric-coordinates/  # NOQA
            http://strattonbrazil.blogspot.co.uk/2011/09/single-pass-wireframe-rendering_11.html  # NOQA
            """
            edges = []
            MEW = 100.0
            if excludes is None:
                excludes = [0] * len(self.indices) * 2
            for i in range(0, len(self.indices), 3):
                # each triangle
                i0 = self.indices[i+0] * 4
                i1 = self.indices[i+1] * 4
                i2 = self.indices[i+2] * 4
                e0 = excludes[i+0]
                e1 = excludes[i+1]
                e2 = excludes[i+2]
                p0 = self.vertices[i0:i0+4]
                p1 = self.vertices[i1:i1+4]
                p2 = self.vertices[i2:i2+4]
                v0 = self.vec2minus(p2, p1)
                v1 = self.vec2minus(p2, p0)
                v2 = self.vec2minus(p1, p0)
                area = fabs(v1[0]*v2[1] - v1[1] * v2[0])
                c0 = (area/self.magnitude(v0), e1 * MEW, e2 * MEW)
                c1 = (e0 * MEW, area/self.magnitude(v1), e2 * MEW)
                c2 = (e0 * MEW, e1 * MEW, area/self.magnitude(v2))
                edges.extend(p0)
                edges.extend(c0)
                edges.extend(p1)
                edges.extend(c1)
                edges.extend(p2)
                edges.extend(c2)
            return create_vertex_buffer(edges)

        def vec2minus(self, a, b):
            return a[0] - b[0], a[1] - b[1]

        def magnitude(self, v):
            return (v[0]**2 + v[1]**2) ** 0.5

        @classmethod
        def load_file(cls, path):
            """Loads from file"""

class_skeleton:

    class TurtleGeometry(object):
        '''Manages the mesh for a turtle.

        Uses cffi to create c-arrays for storing vertices, indexes, and
        normals.
        '''
        def __init__(self, scale, vertices, indices, exclude):
            pass
        def calculate_edges(self, excludes):
            '''Builds a vertex list adding barycentric coordinates to each vertex.

            Used to draw turtle borders efficiently, specialised to draw only
            the some edges. See below for references.

            http://stackoverflow.com/questions/18035719/drawing-a-border-on-a-2d-polygon-with-a-fragment-shader  # NOQA
            http://codeflow.org/entries/2012/aug/02/easy-wireframe-display-with-barycentric-coordinates/  # NOQA
            http://strattonbrazil.blogspot.co.uk/2011/09/single-pass-wireframe-rendering_11.html  # NOQA
            '''
            pass
        def vec2minus(self, a, b):
            pass
        def magnitude(self, v):
            pass
        @classmethod
        def load_file(cls, path):
            '''Loads from file'''
            pass

metrics: total_program_units=7, total_doc_str=3, AvgCountLine=10, AvgCountLineBlank=1,
    AvgCountLineCode=8, AvgCountLineComment=2, AvgCyclomatic=1, CommentToCodeRatio=0.29,
    CountClassBase=1, CountClassCoupled=1, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=4, CountDeclInstanceVariable=5, CountDeclMethod=5, CountDeclMethodAll=5,
    CountLine=63, CountLineBlank=10, CountLineCode=42, CountLineCodeDecl=31, CountLineCodeExe=35,
    CountLineComment=12, CountStmt=41, CountStmtDecl=30, CountStmtExe=35, MaxCyclomatic=3,
    MaxInheritanceTree=1, MaxNesting=1, SumCyclomatic=7
id: 4,208
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/gl/uniform.py
class_name: turgles.gl.uniform.UniformError
human_written_code:

    class UniformError(Exception):
        pass

class_skeleton:

    class UniformError(Exception):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=10,
    CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=1, CountLineCodeExe=1,
    CountLineComment=0, CountStmt=2, CountStmtDecl=1, CountStmtExe=1, MaxCyclomatic=0,
    MaxInheritanceTree=3, MaxNesting=0, SumCyclomatic=0
id: 4,209
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/gl/uniform.py
class_name: turgles.gl.uniform.Uniform
human_written_code:

    class Uniform(object):
        """A shader uniform variable.

        Provides some convienices to set/get uniforms"""

        UNIFORM_TYPES = {
            GL_FLOAT: (GLfloat, 1),
            GL_FLOAT_VEC2: (GLfloat, 2),
            GL_FLOAT_VEC3: (GLfloat, 3),
            GL_FLOAT_VEC4: (GLfloat, 4),
            GL_INT: (GLint, 1),
            GL_INT_VEC2: (GLint, 2),
            GL_INT_VEC3: (GLint, 3),
            GL_INT_VEC4: (GLint, 4),
            GL_FLOAT_MAT2: (GLfloat, 4),
            GL_FLOAT_MAT3: (GLfloat, 9),
            GL_FLOAT_MAT4: (GLfloat, 16),
        }

        SETTERS = {
            # argument settings
            GL_FLOAT: glUniform1f,
            GL_FLOAT_VEC2: glUniform2f,
            GL_FLOAT_VEC3: glUniform3f,
            GL_FLOAT_VEC4: glUniform4f,
            GL_INT: glUniform1i,
            GL_INT_VEC2: glUniform2i,
            GL_INT_VEC3: glUniform3i,
            GL_INT_VEC4: glUniform4i,
        }

        VSETTERS = {
            GL_FLOAT: glUniform1fv,
            GL_FLOAT_VEC2: glUniform2fv,
            GL_FLOAT_VEC3: glUniform3fv,
            GL_FLOAT_VEC4: glUniform4fv,
            GL_INT: glUniform1iv,
            GL_INT_VEC2: glUniform2iv,
            GL_INT_VEC3: glUniform3iv,
            GL_INT_VEC4: glUniform4iv,
            GL_FLOAT_MAT2: glUniformMatrix2fv,
            GL_FLOAT_MAT3: glUniformMatrix3fv,
            GL_FLOAT_MAT4: glUniformMatrix4fv,
        }

        GETTERS = {
            GLfloat: glGetUniformfv,
            GLint: glGetUniformiv,
        }

        def __init__(self, program_id, index):
            self.program_id = program_id
            self.index = index
            self.size, self.type, self.name = load_uniform_data(program_id, index)
            self.location = glGetUniformLocation(
                program_id, self.name.encode('utf8'))
            # unpack type constant
            self.item_type, self.length = self.UNIFORM_TYPES[self.type]
            if self.item_type == GLfloat:
                self.ctypes_converter = to_float_pointer
            elif self.item_type == GLint:
                self.ctypes_converter = to_int_pointer
            # ctypes type to use
            self.ctype = self.item_type * self.length
            # setup correct gl functions to access
            self._getter = self.GETTERS[self.item_type]
            self._setter = self.SETTERS.get(self.type, None)
            self._setterv = self.VSETTERS.get(self.type)

        def __eq__(self, other):
            return self.index == other.index

        def get(self):
            params = self.ctype(*([0.0] * self.length))
            self._getter(self.program_id, self.location, params)
            return params

        def set(self, *data, **kwargs):
            n = len(data)
            assert data
            size = kwargs.get('size', self.size)
            if n > 1 or self.length == 1:
                # use non-array setter
                if n != self.length:
                    raise UniformError("Uniform '%s' is of length %d, not %d" % (
                        self.name, self.length, len(data)))
                self._setter(self.location, *data)
            else:
                # use array based setter
                data = data[0]
                if len(data) != self.length * size:
                    raise UniformError("uniform '%s' is of length %d, not %d" % (
                        self.name, self.length, len(data)))
                if isinstance(data, FFI.CData):
                    cdata = self.ctypes_converter(data)
                else:
                    # WARNING copies data, because ctypes. Send ffi data for speed
                    cdata = self.ctype(*data)
                self._setterv(self.location, size, GL_FALSE, cdata)

class_skeleton:

    class Uniform(object):
        '''A shader uniform variable.

        Provides some convienices to set/get uniforms'''
        def __init__(self, program_id, index):
            pass
        def __eq__(self, other):
            pass
        def get(self):
            pass
        def set(self, *data, **kwargs):
            pass

metrics: total_program_units=5, total_doc_str=1, AvgCountLine=12, AvgCountLineBlank=0,
    AvgCountLineCode=10, AvgCountLineComment=2, AvgCyclomatic=3, CommentToCodeRatio=0.11,
    CountClassBase=1, CountClassCoupled=2, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=4, CountDeclInstanceVariable=13, CountDeclMethod=4, CountDeclMethodAll=4,
    CountLine=100, CountLineBlank=10, CountLineCode=81, CountLineCodeDecl=23, CountLineCodeExe=76,
    CountLineComment=9, CountStmt=39, CountStmtDecl=23, CountStmtExe=34, MaxCyclomatic=5,
    MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=10
id: 4,210
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/gl/program.py
class_name: turgles.gl.program.VertexShaderError
human_written_code:

    class VertexShaderError(ShaderError):
        pass

class_skeleton:

    class VertexShaderError(ShaderError):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=10,
    CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=1, CountLineCodeExe=1,
    CountLineComment=0, CountStmt=2, CountStmtDecl=1, CountStmtExe=1, MaxCyclomatic=0,
    MaxInheritanceTree=4, MaxNesting=0, SumCyclomatic=0
id: 4,211
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/gl/program.py
class_name: turgles.gl.program.ShaderLinkerError
human_written_code:

    class ShaderLinkerError(ShaderError):
        pass

class_skeleton:

    class ShaderLinkerError(ShaderError):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=10,
    CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=1, CountLineCodeExe=1,
    CountLineComment=0, CountStmt=2, CountStmtDecl=1, CountStmtExe=1, MaxCyclomatic=0,
    MaxInheritanceTree=4, MaxNesting=0, SumCyclomatic=0
id: 4,212
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/gl/program.py
class_name: turgles.gl.program.Program
human_written_code:

    class Program:
        """Shader program abstraction.

        Loads/compiles/links the shaders, and handles any errors.
        """

        def __init__(self, vertex, fragment):
            self.id = glCreateProgram()
            self.create_shader(vertex, GL_VERTEX_SHADER)
            self.create_shader(fragment, GL_FRAGMENT_SHADER)
            self.compile()
            self.bind()
            count = pointer(GLint(0))
            self.uniforms = {}
            glGetProgramiv(self.id, GL_ACTIVE_UNIFORMS, count)
            for index in range(count[0]):
                uniform = Uniform(self.id, index)
                self.uniforms[uniform.name] = uniform
            #for v in self.uniforms.values():
            #    print(v.name, v.size, v.length)
            self.unbind()

        def create_shader(self, src, type):
            shader_id = glCreateShader(type)
            glShaderSource(shader_id, 1, byref(convert_to_cstring(src)), None)
            glCompileShader(shader_id)
            status = c_int(0)
            glGetShaderiv(
                shader_id, GL_OBJECT_COMPILE_STATUS_ARB, byref(status))
            if not status:
                if type == GL_VERTEX_SHADER:
                    exc = VertexShaderError
                else:
                    exc = FragmentShaderError
                raise exc(get_shader_log(shader_id))
            else:
                glAttachShader(self.id, shader_id)

        def compile(self):
            glLinkProgram(self.id)
            status = c_int(0)
            glGetProgramiv(self.id, GL_LINK_STATUS, byref(status))
            if not status:
                raise ShaderLinkerError(get_program_log(self.id))

        def bind(self):
            glUseProgram(self.id)

        def unbind(self):
            glUseProgram(0)

class_skeleton:

    class Program:
        '''Shader program abstraction.

        Loads/compiles/links the shaders, and handles any errors.
        '''
        def __init__(self, vertex, fragment):
            pass
        def create_shader(self, src, type):
            pass
        def compile(self):
            pass
        def bind(self):
            pass
        def unbind(self):
            pass

metrics: total_program_units=6, total_doc_str=1, AvgCountLine=9, AvgCountLineBlank=1,
    AvgCountLineCode=8, AvgCountLineComment=0, AvgCyclomatic=2, CommentToCodeRatio=0.13,
    CountClassBase=0, CountClassCoupled=6, CountClassCoupledModified=4, CountClassDerived=0,
    CountDeclInstanceMethod=5, CountDeclInstanceVariable=2, CountDeclMethod=5, CountDeclMethodAll=5,
    CountLine=56, CountLineBlank=12, CountLineCode=39, CountLineCodeDecl=15, CountLineCodeExe=33,
    CountLineComment=5, CountStmt=36, CountStmtDecl=15, CountStmtExe=30, MaxCyclomatic=3,
    MaxInheritanceTree=0, MaxNesting=2, SumCyclomatic=9
id: 4,213
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/gl/program.py
class_name: turgles.gl.program.FragmentShaderError
human_written_code:

    class FragmentShaderError(ShaderError):
        pass

class_skeleton:

    class FragmentShaderError(ShaderError):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=10,
    CountLine=2, CountLineBlank=0, CountLineCode=2, CountLineCodeDecl=1, CountLineCodeExe=1,
    CountLineComment=0, CountStmt=2, CountStmtDecl=1, CountStmtExe=1, MaxCyclomatic=0,
    MaxInheritanceTree=4, MaxNesting=0, SumCyclomatic=0
id: 4,214
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/benchmarks/bench.py
class_name: bench.TurtleView3
human_written_code:

    class TurtleView3():
        """Constant offsets and memoryview. 3.3 only"""
        __slots__ = ('view')

        def __init__(self, turtles, num):
            self.view = memoryview(turtles)[num:num + 4]

        def getx(self):
            return self.view[0]

        def setx(self, x):
            self.view[0] = x
        x = property(getx, setx)

        def gety(self):
            return self.view[1]

        def sety(self, y):
            self.view[1] = y
        y = property(gety, sety)

        def move(self, dx, dy):
            self.x += dx
            self.y += dy

class_skeleton:

    class TurtleView3():
        '''Constant offsets and memoryview. 3.3 only'''
        def __init__(self, turtles, num):
            pass
        def getx(self):
            pass
        def setx(self, x):
            pass
        def gety(self):
            pass
        def sety(self, y):
            pass
        def move(self, dx, dy):
            pass

metrics: total_program_units=7, total_doc_str=1, AvgCountLine=2, AvgCountLineBlank=0,
    AvgCountLineCode=2, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0.06,
    CountClassBase=0, CountClassCoupled=1, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=6, CountDeclInstanceVariable=1, CountDeclMethod=6, CountDeclMethodAll=6,
    CountLine=23, CountLineBlank=5, CountLineCode=17, CountLineCodeDecl=11, CountLineCodeExe=10,
    CountLineComment=1, CountStmt=17, CountStmtDecl=11, CountStmtExe=10, MaxCyclomatic=1,
    MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=6
id: 4,215
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/gl/buffer.py
class_name: turgles.gl.buffer.Buffer
human_written_code:

    class Buffer(object):
        """Generic buffer abstraction.

        Creation, binding and loading of GPU buffers.
        """

        def __init__(self, array_type, element_type, draw_type):
            self.array_type = array_type
            self.element_type = element_type
            self.element_flag, self.element_size = GL_TYPEMAP[element_type]
            self.draw_type = draw_type
            # how much GPU memory have we added so far
            self.buffer_size = 0
            self.id = GLuint()
            glGenBuffers(1, self.id)

        def bind(self):
            glBindBuffer(self.array_type, self.id)

        def unbind(self):
            """Same for all buffer types"""
            glBindBuffer(self.array_type, 0)

        def load(self, data, size=None):
            """Data is cffi array"""
            self.bind()
            if size is None:
                # ffi's sizeof understands arrays
                size = sizeof(data)
            if size == self.buffer_size:
                # same size - no need to allocate new buffer, just copy
                glBufferSubData(
                    self.array_type,
                    0,
                    size,
                    to_raw_pointer(data)
                )
            else:
                # buffer size has changed - need to allocate new buffer in the GPU
                glBufferData(
                    self.array_type,
                    size,
                    to_raw_pointer(data),
                    self.draw_type
                )
                self.buffer_size = size
            self.unbind()

class_skeleton:

    class Buffer(object):
        '''Generic buffer abstraction.

        Creation, binding and loading of GPU buffers.
        '''
        def __init__(self, array_type, element_type, draw_type):
            pass
        def bind(self):
            pass
        def unbind(self):
            '''Same for all buffer types'''
            pass
        def load(self, data, size=None):
            '''Data is cffi array'''
            pass

metrics: total_program_units=5, total_doc_str=3, AvgCountLine=10, AvgCountLineBlank=1,
    AvgCountLineCode=8, AvgCountLineComment=2, AvgCyclomatic=2, CommentToCodeRatio=0.27,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=1,
    CountDeclInstanceMethod=4, CountDeclInstanceVariable=7, CountDeclMethod=4, CountDeclMethodAll=4,
    CountLine=49, CountLineBlank=7, CountLineCode=33, CountLineCodeDecl=11, CountLineCodeExe=28,
    CountLineComment=9, CountStmt=22, CountStmtDecl=11, CountStmtExe=17, MaxCyclomatic=3,
    MaxInheritanceTree=1, MaxNesting=1, SumCyclomatic=6
id: 4,216
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/benchmarks/bench.py
class_name: bench.TurtleView2
human_written_code:

    class TurtleView2():
        """Global array reference and per instance offsets."""
        __slots__ = ('X', 'Y', 'ANGLE', 'SIZE')

        def __init__(self, _, num):
            self.X, self.Y, self.ANGLE, self.SIZE = (num + i for i in range(4))

        def getx(self):
            return TURTLES[self.X]

        def setx(self, x):
            TURTLES[self.X] = x
        x = property(getx, setx)

        def gety(self):
            return TURTLES[self.Y]

        def sety(self, y):
            TURTLES[self.Y] = y
        y = property(gety, sety)

        def move(self, dx, dy):
            self.x += dx
            self.y += dy

class_skeleton:

    class TurtleView2():
        '''Global array reference and per instance offsets.'''
        def __init__(self, _, num):
            pass
        def getx(self):
            pass
        def setx(self, x):
            pass
        def gety(self):
            pass
        def sety(self, y):
            pass
        def move(self, dx, dy):
            pass

metrics: total_program_units=7, total_doc_str=1, AvgCountLine=2, AvgCountLineBlank=0,
    AvgCountLineCode=2, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0.06,
    CountClassBase=0, CountClassCoupled=1, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=6, CountDeclInstanceVariable=4, CountDeclMethod=6, CountDeclMethodAll=6,
    CountLine=23, CountLineBlank=5, CountLineCode=17, CountLineCodeDecl=11, CountLineCodeExe=10,
    CountLineComment=1, CountStmt=17, CountStmtDecl=11, CountStmtExe=10, MaxCyclomatic=1,
    MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=6
id: 4,217
repository_name: AllTheWayDown/turgles
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AllTheWayDown_turgles/turgles/renderer.py
class_name: turgles.renderer.Renderer
human_written_code:

    class Renderer(object):
        vertex_shader = pkg_resources.resource_string(
            'turgles', 'shaders/turtles.vert').decode('utf8')
        fragment_shader = pkg_resources.resource_string(
            'turgles', 'shaders/turtles.frag').decode('utf8')

        def __init__(
                self, width, height, samples=None, buffer_size=16):
            self.width = width
            self.half_width = width // 2
            self.height = height
            self.half_height = height // 2
            self.create_window(width, height, samples)
            self.set_background_color()
            self.compile_program()
            self.setup_vaos()
            self.manager = BufferManager(buffer_size)
            self.perspective_matrix = identity()
            self.set_perspective()
            self.view_matrix = identity()
            self.view_matrix[12] = 0.0
            self.view_matrix[13] = 0.0
            self.view_matrix[14] = 0.0
            self.set_view()

        def create_window(self, width, height, samples):
            kwargs = dict(double_buffer=True)
            max_samples = GLint()
            glGetIntegerv(GL_MAX_SAMPLES, max_samples)
            if max_samples.value > 0:
                kwargs['sample_buffers'] = 1
                kwargs['samples'] = min(max_samples.value, 16)
                print("Setting antialiasing to %s" % kwargs['samples'])
            self.config = pyglet.gl.Config(**kwargs)
            self.window = pyglet.window.Window(
                config=self.config,
                width=int(width),
                height=int(height)
            )
            glEnable(GL_DEPTH_TEST)
            self.speed = 1
            self.keys = key.KeyStateHandler()
            self.window.push_handlers(self.keys)
            pyglet.clock.schedule_interval(self.move_camera, 1/30)

            @self.window.event
            def on_resize(width, height):
                self.width = width
                self.height = height
                self.set_perspective()
                return pyglet.event.EVENT_HANDLED

        def move_camera(self, dt):
            if self.keys[key.UP]:
                self.view_matrix[13] -= self.speed * dt
            elif self.keys[key.DOWN]:
                self.view_matrix[13] += self.speed * dt
            if self.keys[key.LEFT]:
                self.view_matrix[12] += self.speed * dt
            elif self.keys[key.RIGHT]:
                self.view_matrix[12] -= self.speed * dt
            if self.keys[key.PAGEUP]:
                self.view_matrix[14] += self.speed * dt
            elif self.keys[key.PAGEDOWN]:
                self.view_matrix[14] -= self.speed * dt
            self.set_view()

        def set_perspective(self, near=0, far=3, fov=90.0):
            scale = 1.0 / tan(radians(fov) / 2.0)
            self.perspective_matrix[0] = scale / (self.width / self.height)
            self.perspective_matrix[5] = scale
            self.perspective_matrix[10] = -(far + near) / (far - near)
            self.perspective_matrix[11] = -1.0
            self.perspective_matrix[14] = (-2 * far * near) / (far - near)
            # ortho, doesn't work
            # self.perspective_matrix[0] = 2.0/self.width
            # self.perspective_matrix[5] = 2.0/self.height
            # self.perspective_matrix[10] = 1.0/(far - near)
            # self.perspective_matrix[14] = -near/(far - near)
            # self.perspective_matrix[15] = 1.0
            self.program.bind()
            self.program.uniforms['projection'].set(
                self.perspective_matrix)
            scale = min(self.width, self.height) // 2
            self.program.uniforms['world_scale'].set(scale)
            self.program.unbind()
            glViewport(
                0, 0,
                (GLsizei)(int(self.width)),
                (GLsizei)(int(self.height))
            )

        def set_view(self):
            self.program.bind()
            self.program.uniforms['view'].set(
                self.view_matrix)
            self.program.unbind()

        def set_background_color(self, color=None):
            if color is None:
                glClearColor(1.0, 1.0, 1.0, 0.0)
            else:
                glClearColor(color[0], color[1], color[2], 0.0)

        def compile_program(self):
            self.program = Program(self.vertex_shader, self.fragment_shader)

        def setup_vaos(self):
            self.program.bind()
            self.vao = {}
            for shape, geom in SHAPES.items():
                self.vao[shape] = TurtleShapeVAO(shape, self.program, geom)

        # ninjaturtle engine interface
        def render(self, flip=True):
            self.window.clear()
            for buffer in self.manager.buffers.values():
                if buffer.count > 0:
                    vao = self.vao[buffer.shape]
                    vao.render(buffer.model, buffer.color, buffer.count)
            if flip:
                self.window.flip()

        # ninjaturtle engine interface
        def create_turtle(self, model, init, shape='classic'):
            model_init = init[:TURTLE_MODEL_DATA_SIZE]
            color_init = init[TURTLE_MODEL_DATA_SIZE:]
            assert len(color_init) == TURTLE_COLOR_DATA_SIZE
            data, color = self.manager.create_turtle(
                model.id, shape, model_init, color_init)
            model.data = data
            model.backend = Turgle(self, model, color, shape)

        # ninjaturtle engine interface
        def destroy_turtle_data(self, id):
            self.manager.destroy_turtle(id)

class_skeleton:

    class Renderer(object):
        def __init__(
                self, width, height, samples=None, buffer_size=16):
            pass
        def create_window(self, width, height, samples):
            pass
        @self.window.event
        def on_resize(width, height):
            pass
        def move_camera(self, dt):
            pass
        def set_perspective(self, near=0, far=3, fov=90.0):
            pass
        def set_view(self):
            pass
        def set_background_color(self, color=None):
            pass
        def compile_program(self):
            pass
        def setup_vaos(self):
            pass
        def render(self, flip=True):
            pass
        def create_turtle(self, model, init, shape='classic'):
            pass
        def destroy_turtle_data(self, id):
            pass

metrics: total_program_units=14, total_doc_str=0, AvgCountLine=11, AvgCountLineBlank=1,
    AvgCountLineCode=10, AvgCountLineComment=1, AvgCyclomatic=2, CommentToCodeRatio=0.08,
    CountClassBase=1, CountClassCoupled=6, CountClassCoupledModified=4, CountClassDerived=1,
    CountDeclInstanceMethod=11, CountDeclInstanceVariable=13, CountDeclMethod=11, CountDeclMethodAll=11,
    CountLine=151, CountLineBlank=23, CountLineCode=119, CountLineCodeDecl=43, CountLineCodeExe=100,
    CountLineComment=9, CountStmt=96, CountStmtDecl=37, CountStmtExe=83, MaxCyclomatic=7,
    MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=24
id: 4,218
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/tests/test_buffers.py
class_name: turgles.tests.test_buffers.BufferManagerTestCase
human_written_code:

    class BufferManagerTestCase(TestCase):

        def test_get_buffer(self):
            manager = BufferManager(4)
            self.assertEqual(len(manager.buffers), 0)
            buffer1 = manager.get_buffer('classic')
            self.assertEqual(len(manager.buffers), 1)
            self.assertIn('classic', manager.buffers)
            self.assertEqual(buffer1.size, 4)
            buffer2 = manager.get_buffer('classic')
            self.assertEqual(len(manager.buffers), 1)
            self.assertIs(buffer1, buffer2)

        def test_create_turtle(self):
            manager = BufferManager(4)
            model, color = manager.create_turtle(
                0, 'classic', MODEL_ONES, COLOR_ONES)
            self.assertEqual(list(model), MODEL_ONES)
            self.assertEqual(list(color), COLOR_ONES)
            self.assertEqual(len(manager.buffers), 1)
            self.assertIn('classic', manager.buffers)
            self.assertEqual(manager.buffers['classic'].size, 4)

        def test_set_shape(self):
            manager = BufferManager(4)
            model, color = manager.create_turtle(
                0, 'classic', MODEL_ONES, COLOR_ONES)
            model2, color2 = manager.set_shape(0, 'turtle')
            self.assertEqual(len(manager.buffers), 2)
            self.assertIn('turtle', manager.buffers)
            self.assertEqual(list(model2), MODEL_ONES)
            self.assertEqual(list(color2), COLOR_ONES)

        def test_destroy_turtle(self):
            manager = BufferManager(4)
            model, color = manager.create_turtle(
                0, 'classic', MODEL_ONES, COLOR_ONES)
            manager.destroy_turtle(0)
            self.assertEqual(list(model), MODEL_ZEROS)
            self.assertEqual(list(color), COLOR_ZEROS)
            self.assertEqual(manager.buffers['classic'].count, 0)

class_skeleton:

    class BufferManagerTestCase(TestCase):
        def test_get_buffer(self):
            pass
        def test_create_turtle(self):
            pass
        def test_set_shape(self):
            pass
        def test_destroy_turtle(self):
            pass

metrics: total_program_units=5, total_doc_str=0, AvgCountLine=9, AvgCountLineBlank=0,
    AvgCountLineCode=9, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=2, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=4, CountDeclInstanceVariable=0, CountDeclMethod=4, CountDeclMethodAll=76,
    CountLine=41, CountLineBlank=4, CountLineCode=37, CountLineCodeDecl=15, CountLineCodeExe=32,
    CountLineComment=0, CountStmt=34, CountStmtDecl=15, CountStmtExe=29, MaxCyclomatic=1,
    MaxInheritanceTree=2, MaxNesting=0, SumCyclomatic=4
id: 4,219
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/tests/test_buffers.py
class_name: turgles.tests.test_buffers.ChunkBufferTestCase
human_written_code:

    class ChunkBufferTestCase(TestCase):

        def assert_turtle_data(self, buffer, index, data):
            offset = index * TURTLE_MODEL_DATA_SIZE
            slice = buffer.data[offset:offset + TURTLE_MODEL_DATA_SIZE]
            self.assertEqual(list(slice), data)

        def test_sized_correctly(self):
            buffer = ChunkBuffer(4, TURTLE_MODEL_DATA_SIZE)
            self.assertEqual(len(buffer.data), 4 * TURTLE_MODEL_DATA_SIZE)
            self.assertEqual(buffer.count, 0)
            self.assertEqual(buffer.size, 4)

        def test_new(self):
            buffer = ChunkBuffer(4, TURTLE_MODEL_DATA_SIZE)
            data = buffer.new()
            self.assertEqual(len(data), TURTLE_MODEL_DATA_SIZE)
            self.assertEqual(list(data), MODEL_ZEROS)
            self.assertEqual(buffer.count, 1)

        def test_new_with_init(self):
            buffer = ChunkBuffer(4, TURTLE_MODEL_DATA_SIZE)
            init = list(reversed(range(TURTLE_MODEL_DATA_SIZE)))
            data = buffer.new(init)
            self.assertEqual(len(data), TURTLE_MODEL_DATA_SIZE)
            self.assertEqual(list(data), init)

        def test_mutlple_new(self):
            buffer = ChunkBuffer(4, TURTLE_MODEL_DATA_SIZE)
            buffer.new()
            self.assertEqual(buffer.count, 1)
            buffer.new()
            self.assertEqual(buffer.count, 2)

        def test_new_triggers_resize(self):
            buffer = ChunkBuffer(2, TURTLE_MODEL_DATA_SIZE)
            buffer.new()
            buffer.new()
            self.assertEqual(buffer.size, 2)
            self.assertEqual(buffer.count, 2)
            buffer.new()
            self.assertEqual(buffer.size, 4)
            self.assertEqual(buffer.count, 3)

        def test_resize(self):
            buffer = ChunkBuffer(2, TURTLE_MODEL_DATA_SIZE)
            buffer.new(MODEL_ONES)
            buffer.new(MODEL_TWOS)
            buffer.resize(4)
            self.assertEqual(buffer.size, 4)
            self.assertEqual(len(buffer.data), 4 * TURTLE_MODEL_DATA_SIZE)
            self.assert_turtle_data(buffer, 0, MODEL_ONES)
            self.assert_turtle_data(buffer, 1, MODEL_TWOS)
            self.assert_turtle_data(buffer, 2, MODEL_ZEROS)
            self.assert_turtle_data(buffer, 3, MODEL_ZEROS)

        def test_remove_end(self):
            buffer = ChunkBuffer(4, TURTLE_MODEL_DATA_SIZE)
            buffer.new(MODEL_ONES)
            buffer.new(MODEL_TWOS)
            buffer.new(MODEL_THREES)
            moved = buffer.remove(2)
            self.assertEqual(buffer.count, 2)
            self.assertIsNone(moved)
            self.assert_turtle_data(buffer, 0, MODEL_ONES)
            self.assert_turtle_data(buffer, 1, MODEL_TWOS)
            self.assert_turtle_data(buffer, 2, MODEL_ZEROS)

        def test_remove_start(self):
            buffer = ChunkBuffer(4, TURTLE_MODEL_DATA_SIZE)
            buffer.new(MODEL_ONES)
            buffer.new(MODEL_TWOS)
            buffer.new(MODEL_THREES)
            moved = buffer.remove(0)
            self.assertEqual(buffer.count, 2)
            self.assertEqual(moved, 2)
            self.assert_turtle_data(buffer, 0, MODEL_THREES)
            self.assert_turtle_data(buffer, 1, MODEL_TWOS)
            self.assert_turtle_data(buffer, 2, MODEL_ZEROS)

        def test_remove_middle(self):
            buffer = ChunkBuffer(4, TURTLE_MODEL_DATA_SIZE)
            buffer.new(MODEL_ONES)
            buffer.new(MODEL_TWOS)
            buffer.new(MODEL_THREES)
            moved = buffer.remove(1)
            self.assertEqual(buffer.count, 2)
            self.assertEqual(moved, 2)
            self.assert_turtle_data(buffer, 0, MODEL_ONES)
            self.assert_turtle_data(buffer, 1, MODEL_THREES)
            self.assert_turtle_data(buffer, 2, MODEL_ZEROS)

        def test_remove_then_add(self):
            buffer = ChunkBuffer(4, TURTLE_MODEL_DATA_SIZE)
            buffer.new(MODEL_ONES)
            buffer.new(MODEL_TWOS)
            buffer.new(MODEL_THREES)
            buffer.remove(2)
            self.assertEqual(buffer.count, 2)
            # check data was zeroed
            self.assert_turtle_data(buffer, 2, MODEL_ZEROS)
            buffer.new([4] * TURTLE_MODEL_DATA_SIZE)
            self.assertEqual(buffer.count, 3)
            # check reuses previously removed turtle's space
            self.assert_turtle_data(buffer, 2, [4] * TURTLE_MODEL_DATA_SIZE)

        def make_slices(self, size, array_size=20):
            buffer = ChunkBuffer(array_size, TURTLE_MODEL_DATA_SIZE)
            for i in range(array_size):
                buffer.new([i+1] * TURTLE_MODEL_DATA_SIZE)
            return buffer.slice(size)

        def test_slice_size_multiple(self):
            slices = self.make_slices(10, 20)
            size, slice = next(slices)
            self.assertEqual(size, 10)
            self.assertEqual(
                list(slice[0:TURTLE_MODEL_DATA_SIZE]),
                [1] * TURTLE_MODEL_DATA_SIZE
            )
            size, slice = next(slices)
            self.assertEqual(size, 10)
            self.assertEqual(
                list(slice[0:TURTLE_MODEL_DATA_SIZE]),
                [11] * TURTLE_MODEL_DATA_SIZE
            )
            with self.assertRaises(StopIteration):
                next(slices)

        def test_slice_size_remainder(self):
            slices = self.make_slices(15, 20)
            size, slice = next(slices)
            self.assertEqual(size, 15)
            self.assertEqual(
                list(slice[0:TURTLE_MODEL_DATA_SIZE]),
                [1] * TURTLE_MODEL_DATA_SIZE
            )
            size, slice = next(slices)
            self.assertEqual(size, 5)
            self.assertEqual(
                list(slice[0:TURTLE_MODEL_DATA_SIZE]),
                [16] * TURTLE_MODEL_DATA_SIZE
            )
            with self.assertRaises(StopIteration):
                next(slices)

        def test_slice_size_only_one(self):
            slices = self.make_slices(20, 10)
            size, slice = next(slices)
            self.assertEqual(size, 10)
            self.assertEqual(
                list(slice[0:TURTLE_MODEL_DATA_SIZE]),
                [1] * TURTLE_MODEL_DATA_SIZE
            )
            with self.assertRaises(StopIteration):
                next(slices)

class_skeleton:

    class ChunkBufferTestCase(TestCase):
        def assert_turtle_data(self, buffer, index, data):
            pass
        def test_sized_correctly(self):
            pass
        def test_new(self):
            pass
        def test_new_with_init(self):
            pass
        def test_mutlple_new(self):
            pass
        def test_new_triggers_resize(self):
            pass
        def test_resize(self):
            pass
        def test_remove_end(self):
            pass
        def test_remove_start(self):
            pass
        def test_remove_middle(self):
            pass
        def test_remove_then_add(self):
            pass
        def make_slices(self, size, array_size=20):
            pass
        def test_slice_size_multiple(self):
            pass
        def test_slice_size_remainder(self):
            pass
        def test_slice_size_only_one(self):
            pass

metrics: total_program_units=16, total_doc_str=0, AvgCountLine=9, AvgCountLineBlank=0,
    AvgCountLineCode=9, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0.01,
    CountClassBase=1, CountClassCoupled=5, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=15, CountDeclInstanceVariable=0, CountDeclMethod=15, CountDeclMethodAll=87,
    CountLine=158, CountLineBlank=17, CountLineCode=139, CountLineCodeDecl=42, CountLineCodeExe=123,
    CountLineComment=2, CountStmt=124, CountStmtDecl=42, CountStmtExe=108, MaxCyclomatic=2,
    MaxInheritanceTree=2, MaxNesting=1, SumCyclomatic=16
id: 4,220
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/tests/test_buffers.py
class_name: turgles.tests.test_buffers.ShapeBufferTestCase
human_written_code:

    class ShapeBufferTestCase(TestCase):

        def assert_id_map(self, buffer, id, index):
            self.assertIn(id, buffer.id_to_index)
            self.assertIn(index, buffer.index_to_id)
            self.assertEqual(buffer.id_to_index[id], index)
            self.assertEqual(buffer.index_to_id[index], id)

        def assert_turtle_data(self, buffer, id, index, model, color):
            if id:
                self.assert_id_map(buffer, id, index)
            model_data = buffer.model.get(index)
            color_data = buffer.color.get(index)
            self.assertEqual(list(model_data), model)
            self.assertEqual(list(color_data), color)

        def test_new(self):
            buffer = ShapeBuffer('shape', 4)
            model, color = buffer.new(0)
            self.assert_turtle_data(buffer, 0, 0, MODEL_ZEROS, COLOR_ZEROS)
            self.assertEqual(buffer.count, 1)

        def test_new_bad_id(self):
            buffer = ShapeBuffer('shape', 4)
            buffer.new(0)
            with self.assertRaises(AssertionError):
                buffer.new(0)

        def test_new_with_init(self):
            buffer = ShapeBuffer('shape', 4)
            model, color = buffer.new(0, MODEL_ONES, COLOR_TWOS)
            self.assert_turtle_data(buffer, 0, 0, MODEL_ONES, COLOR_TWOS)

        def test_mutlple_new(self):
            buffer = ShapeBuffer('shape', 4)
            buffer.new(0)
            self.assert_id_map(buffer, 0, 0)
            self.assertEqual(buffer.count, 1)
            buffer.new(1)
            self.assert_id_map(buffer, 1, 1)
            self.assertEqual(buffer.count, 2)

        def test_remove_id_end(self):
            buffer = ShapeBuffer('shape', 4)
            buffer.new(0, MODEL_ONES, COLOR_ONES)
            buffer.new(1, MODEL_TWOS, COLOR_TWOS)
            buffer.new(2, MODEL_THREES, COLOR_THREES)
            self.assert_turtle_data(buffer, 2, 2, MODEL_THREES, COLOR_THREES)
            buffer.remove(2)
            self.assertEqual(buffer.count, 2)
            self.assert_turtle_data(buffer, 0, 0, MODEL_ONES, COLOR_ONES)
            self.assert_turtle_data(buffer, 1, 1, MODEL_TWOS, COLOR_TWOS)
            # check last one zeroed
            self.assert_turtle_data(buffer, None, 2, MODEL_ZEROS, COLOR_ZEROS)
            self.assertNotIn(2, buffer.id_to_index)
            self.assertNotIn(2, buffer.index_to_id)

        def test_remove_id_start(self):
            buffer = ShapeBuffer('shape', 4)
            buffer.new(0, MODEL_ONES, COLOR_ONES)
            buffer.new(1, MODEL_TWOS, COLOR_TWOS)
            buffer.new(2, MODEL_THREES, COLOR_THREES)
            self.assert_turtle_data(buffer, 0, 0, MODEL_ONES, COLOR_ONES)
            buffer.remove(0)
            self.assertEqual(buffer.count, 2)
            # check last one has been copied to 0
            self.assert_turtle_data(buffer, 2, 0, MODEL_THREES, COLOR_THREES)
            self.assert_turtle_data(buffer, 1, 1, MODEL_TWOS, COLOR_TWOS)
            # check last one zeroed
            self.assert_turtle_data(buffer, None, 2, MODEL_ZEROS, COLOR_ZEROS)
            self.assertNotIn(0, buffer.id_to_index)
            self.assertNotIn(2, buffer.index_to_id)

        def test_remove_id_middle(self):
            buffer = ShapeBuffer('shape', 4)
            buffer.new(0, MODEL_ONES, COLOR_ONES)
            buffer.new(1, MODEL_TWOS, COLOR_TWOS)
            buffer.new(2, MODEL_THREES, COLOR_THREES)
            self.assert_turtle_data(buffer, 1, 1, MODEL_TWOS, COLOR_TWOS)
            buffer.remove(1)
            self.assertEqual(buffer.count, 2)
            # check last has been copied to 1
            self.assert_turtle_data(buffer, 0, 0, MODEL_ONES, COLOR_ONES)
            self.assert_turtle_data(buffer, 2, 1, MODEL_THREES, COLOR_THREES)
            # check last one zeroed
            self.assert_turtle_data(buffer, None, 2, MODEL_ZEROS, COLOR_ZEROS)
            self.assertNotIn(1, buffer.id_to_index)
            self.assertNotIn(2, buffer.index_to_id)

class_skeleton:

    class ShapeBufferTestCase(TestCase):
        def assert_id_map(self, buffer, id, index):
            pass
        def assert_turtle_data(self, buffer, id, index, model, color):
            pass
        def test_new(self):
            pass
        def test_new_bad_id(self):
            pass
        def test_new_with_init(self):
            pass
        def test_mutlple_new(self):
            pass
        def test_remove_id_end(self):
            pass
        def test_remove_id_start(self):
            pass
        def test_remove_id_middle(self):
            pass

metrics: total_program_units=10, total_doc_str=0, AvgCountLine=9, AvgCountLineBlank=0,
    AvgCountLineCode=8, AvgCountLineComment=1, AvgCyclomatic=1, CommentToCodeRatio=0.07,
    CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=9, CountDeclInstanceVariable=0, CountDeclMethod=9, CountDeclMethodAll=81,
    CountLine=88, CountLineBlank=9, CountLineCode=74, CountLineCodeDecl=21, CountLineCodeExe=64,
    CountLineComment=5, CountStmt=74, CountStmtDecl=21, CountStmtExe=64, MaxCyclomatic=2,
    MaxInheritanceTree=2, MaxNesting=1, SumCyclomatic=10
id: 4,221
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/tests/test_shaders.py
class_name: turgles.tests.test_shaders.ProgramTestCase
human_written_code:

    class ProgramTestCase(TestCase):

        def test_simple_shaders_compile(self):
            p = Program(VERTEX, FRAGMENT)
            self.assertEqual(len(p.uniforms), 1)
            self.assertIn('model', p.uniforms)

        def test_bad_VERTEX_shader(self):
            with self.assertRaises(ShaderError):
                Program("", FRAGMENT)

        def test_bad_fragment_shader(self):
            with self.assertRaises(ShaderError):
                Program(VERTEX, "")

class_skeleton:

    class ProgramTestCase(TestCase):
        def test_simple_shaders_compile(self):
            pass
        def test_bad_VERTEX_shader(self):
            pass
        def test_bad_fragment_shader(self):
            pass

metrics: total_program_units=4, total_doc_str=0, AvgCountLine=3, AvgCountLineBlank=0,
    AvgCountLineCode=3, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=2, CountClassCoupledModified=2, CountClassDerived=0,
    CountDeclInstanceMethod=3, CountDeclInstanceVariable=0, CountDeclMethod=3, CountDeclMethodAll=75,
    CountLine=14, CountLineBlank=3, CountLineCode=11, CountLineCodeDecl=5, CountLineCodeExe=7,
    CountLineComment=0, CountStmt=11, CountStmtDecl=5, CountStmtExe=7, MaxCyclomatic=1,
    MaxInheritanceTree=2, MaxNesting=1, SumCyclomatic=3
id: 4,222
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/tests/test_shaders.py
class_name: turgles.tests.test_shaders.ShadersTestCase
human_written_code:

    class ShadersTestCase(TestCase):

        def assert_compiles(self, vertex=None, fragment=None):
            if vertex is None:
                vertex_data = VERTEX
            else:
                with open(vertex) as f:
                    vertex_data = f.read()
            if fragment is None:
                fragment_data = FRAGMENT
            else:
                with open(fragment) as f:
                    fragment_data = f.read()
            try:
                Program(vertex_data, fragment_data)
            except ShaderError as e:
                self.fail("shaders %s and %s failed to compile: %s" % (
                    vertex, fragment, e))

        def test_turtle1_vertex_shader_compiles(self):
            self.assert_compiles(vertex='turgles/shaders/turtles.vert')

        def test_turtle_fragment_shader_compiles(self):
            self.assert_compiles(
                vertex='turgles/shaders/turtles.vert',
                fragment='turgles/shaders/turtles.frag',
            )

class_skeleton:

    class ShadersTestCase(TestCase):
        def assert_compiles(self, vertex=None, fragment=None):
            pass
        def test_turtle1_vertex_shader_compiles(self):
            pass
        def test_turtle_fragment_shader_compiles(self):
            pass

metrics: total_program_units=4, total_doc_str=0, AvgCountLine=8, AvgCountLineBlank=0,
    AvgCountLineCode=8, AvgCountLineComment=0, AvgCyclomatic=2, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=2, CountClassCoupledModified=2, CountClassDerived=0,
    CountDeclInstanceMethod=3, CountDeclInstanceVariable=0, CountDeclMethod=3, CountDeclMethodAll=75,
    CountLine=28, CountLineBlank=4, CountLineCode=24, CountLineCodeDecl=8, CountLineCodeExe=20,
    CountLineComment=0, CountStmt=18, CountStmtDecl=6, CountStmtExe=14, MaxCyclomatic=4,
    MaxInheritanceTree=2, MaxNesting=2, SumCyclomatic=6
id: 4,223
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/turgle.py
class_name: turgles.turgle.Turgle
human_written_code:

    class Turgle(object):
        """Implementation of NinjaTurtle's backend api.

        Mainly contains methods for maniuplating the render data.
        """

        def __init__(self, renderer, model, color, shape):
            self.renderer = renderer
            self.model = model
            self.color = color
            self._shape = shape

        def shape(self, shape=None):
            """We need to shift buffers in order to change shape"""
            if shape is None:
                return self._shape
            data, color = self.renderer.manager.set_shape(self.model.id, shape)
            self.model.data = data
            self.color = color
            self._shape = shape

        def shapesize(self, stretch_wid=None, stretch_len=None, outline=None):
            #TODO: outline
            if stretch_wid is stretch_len is outline is None:
                stretch_wid = self.model.data[2]
                stretch_len = self.model.data[3]
                return stretch_wid, stretch_len, None
            if stretch_wid == 0 or stretch_len == 0:
                raise Exception("stretch_wid/stretch_len must not be zero")
            if stretch_wid is not None:
                if stretch_len is None:
                    self.model.data[2] = self.model.data[3] = stretch_wid
                else:
                    self.model.data[2] = stretch_wid
                    self.model.data[3] = stretch_len
        turtlesize = shapesize

        def _get_color_values(self, color):
            if color in COLORS:
                color = COLORS[color]
            elif color[0] == '#':
                color = (
                    int(color[1:3], 16),
                    int(color[3:5], 16),
                    int(color[5:7], 16)
                )
            else:
                assert len(color) == 3  #TODO 0-255 color range
            return color

        def color(self, *args):
            n = len(args)
            if n == 0:
                return tuple(self.color[0:3]), tuple(self.color[4:7])
            if n == 1:
                # either a colorstr or tuple
                values = self._get_color_values(args[0])
                self.color[0:3] = values
                self.color[4:7] = values
            elif n == 3:
                # single color, rgb
                values = self._get_color_values(args)
                self.color[0:3] = values
                self.color[4:7] = values
            elif n == 2:
                # two separate colors
                self.pencolor(args[0])
                self.fillcolor(args[1])
            else:
                raise Exception("Invalid color arguments")

        def pencolor(self, *args):
            #TODO: store string names
            if len(args) == 0:
                return tuple(self.color[0:3])
            elif len(args) == 1:
                color_vals = self._get_color_values(args[0])
            else:
                # rgb params
                color_vals = self._get_color_values(args)
            self.color[0:3] = color_vals

        def fillcolor(self, *args):
            #TODO: store string names
            if len(args) == 0:
                return tuple(self.color[4:7])
            elif len(args) == 1:
                color_vals = self._get_color_values(args[0])
            else:
                # rgb params
                color_vals = self._get_color_values(args)
            self.color[4:7] = color_vals

        def hideturtle(self):
            # TODO
            pass
        ht = hideturtle

        def showturtle(self):
            # TODO
            pass
        st = showturtle

        def isvisible(self):
            # TODO
            return True

        def pendown(self):
            pass
        pd = down = pendown

        def penup(self):
            pass
        pu = up = penup

        def pensize(self, size=None):
            if size is None:
                return self.color[8]
            else:
                self.color[8] = size
        width = pensize

        def pen(self, **kwargs):
            pass

        def isdown(self):
            return False

        def begin_fill():
            pass

        def end_fill():
            pass

        def dot(self):
            pass

        def stamp(self):
            pass

        def clear():
            pass

        def clearstamp(self, id):
            pass

        def clearstamps(self):
            pass

        def write(self):
            pass

class_skeleton:

    class Turgle(object):
        '''Implementation of NinjaTurtle's backend api.

        Mainly contains methods for maniuplating the render data.
        '''
        def __init__(self, renderer, model, color, shape):
            pass
        def shape(self, shape=None):
            '''We need to shift buffers in order to change shape'''
            pass
        def shapesize(self, stretch_wid=None, stretch_len=None, outline=None):
            pass
        def _get_color_values(self, color):
            pass
        def color(self, *args):
            pass
        def pencolor(self, *args):
            pass
        def fillcolor(self, *args):
            pass
        def hideturtle(self):
            pass
        def showturtle(self):
            pass
        def isvisible(self):
            pass
        def pendown(self):
            pass
        def penup(self):
            pass
        def pensize(self, size=None):
            pass
        def pencolor(self, *args):
            pass
        def isdown(self):
            pass
        def begin_fill():
            pass
        def end_fill():
            pass
        def dot(self):
            pass
        def stamp(self):
            pass
        def clear():
            pass
        def clearstamp(self, id):
            pass
        def clearstamps(self):
            pass
        def write(self):
            pass

metrics: total_program_units=24, total_doc_str=2, AvgCountLine=5, AvgCountLineBlank=0,
    AvgCountLineCode=5, AvgCountLineComment=1, AvgCyclomatic=2, CommentToCodeRatio=0.15,
    CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=23, CountDeclInstanceVariable=3, CountDeclMethod=23, CountDeclMethodAll=23,
    CountLine=159, CountLineBlank=31, CountLineCode=112, CountLineCodeDecl=38, CountLineCodeExe=88,
    CountLineComment=17, CountStmt=97, CountStmtDecl=38, CountStmtExe=73, MaxCyclomatic=5,
    MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=39
id: 4,224
repository_name: AllTheWayDown/turgles
file_path: AllTheWayDown_turgles/turgles/benchmarks/bench.py
class_name: bench.TurtleView
human_written_code:

    class TurtleView():
        """Per instance offsets and array reference"""
        __slots__ = ('_turtles', 'X', 'Y', 'ANGLE', 'SIZE')

        def __init__(self, turtles, num):
            self._turtles = turtles
            self.X, self.Y, self.ANGLE, self.SIZE = (num + i for i in range(4))

        def getx(self):
            return self._turtles[self.X]

        def setx(self, x):
            self._turtles[self.X] = x
        x = property(getx, setx)

        def gety(self):
            return self._turtles[self.Y]

        def sety(self, y):
            self._turtles[self.Y] = y
        y = property(gety, sety)

        def move(self, dx, dy):
            self.x += dx
            self.y += dy

class_skeleton:

    class TurtleView():
        '''Per instance offsets and array reference'''
        def __init__(self, turtles, num):
            pass
        def getx(self):
            pass
        def setx(self, x):
            pass
        def gety(self):
            pass
        def sety(self, y):
            pass
        def move(self, dx, dy):
            pass

metrics: total_program_units=7, total_doc_str=1, AvgCountLine=2, AvgCountLineBlank=0,
    AvgCountLineCode=2, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0.06,
    CountClassBase=0, CountClassCoupled=1, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=6, CountDeclInstanceVariable=5, CountDeclMethod=6, CountDeclMethodAll=6,
    CountLine=23, CountLineBlank=4, CountLineCode=18, CountLineCodeDecl=12, CountLineCodeExe=11,
    CountLineComment=1, CountStmt=18, CountStmtDecl=12, CountStmtExe=11, MaxCyclomatic=1,
    MaxInheritanceTree=0, MaxNesting=0, SumCyclomatic=6
id: 4,225
repository_name: AllTheWayDown/turgles
file_path: /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AllTheWayDown_turgles/turgles/util.py
class_name: turgles.util.measure
human_written_code:

    class measure(object):
        """Context manager for recording a measurement"""

        def __init__(self, name):
            self.name = name

        def __call__(self, func):
            @functools.wraps(func)
            def decorator(*args, **kwargs):
                start = time()
                result = func(*args, **kwargs)
                MEASUREMENTS[self.name].append(time() - start)
                return result
            return decorator

        def __enter__(self):
            self.start = time()

        def __exit__(self, *exc_info):
            MEASUREMENTS[self.name].append(time() - self.start)

class_skeleton:

    class measure(object):
        '''Context manager for recording a measurement'''
        def __init__(self, name):
            pass
        def __call__(self, func):
            pass
        @functools.wraps(func)
        def decorator(*args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, *exc_info):
            pass

metrics: total_program_units=7, total_doc_str=1, AvgCountLine=4, AvgCountLineBlank=0,
    AvgCountLineCode=4, AvgCountLineComment=0, AvgCyclomatic=1, CommentToCodeRatio=0.07,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=4, CountDeclInstanceVariable=2, CountDeclMethod=4, CountDeclMethodAll=4,
    CountLine=20, CountLineBlank=4, CountLineCode=15, CountLineCodeDecl=11, CountLineCodeExe=8,
    CountLineComment=1, CountStmt=14, CountStmtDecl=10, CountStmtExe=8, MaxCyclomatic=1,
    MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=5
id: 4,226
repository_name: AltSchool/dynamic-rest
file_path: AltSchool_dynamic-rest/tests/viewsets.py
class_name: tests.viewsets.DogViewSet
human_written_code:

    class DogViewSet(DynamicModelViewSet):
        model = Dog
        serializer_class = DogSerializer
        queryset = Dog.objects.all()
        ENABLE_PATCH_ALL = True

class_skeleton:

    class DogViewSet(DynamicModelViewSet):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=27,
    CountLine=5, CountLineBlank=0, CountLineCode=5, CountLineCodeDecl=5, CountLineCodeExe=4,
    CountLineComment=0, CountStmt=5, CountStmtDecl=5, CountStmtExe=4, MaxCyclomatic=0,
    MaxInheritanceTree=3, MaxNesting=0, SumCyclomatic=0
id: 4,227
repository_name: AltSchool/dynamic-rest
file_path: AltSchool_dynamic-rest/tests/models.py
class_name: tests.models.C
human_written_code:

    class C(models.Model):
        b = models.ForeignKey('B', related_name='cs', on_delete=models.CASCADE)
        d = models.ForeignKey('D', on_delete=models.CASCADE)

class_skeleton:

    class C(models.Model):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0,
    CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2,
    CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0,
    MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=0
id: 4,228
repository_name: AltSchool/dynamic-rest
file_path: AltSchool_dynamic-rest/tests/models.py
class_name: tests.models.Car
human_written_code:

    class Car(models.Model):
        name = models.CharField(max_length=60)
        country = models.ForeignKey(Country, on_delete=models.CASCADE)

class_skeleton:

    class Car(models.Model):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0,
    CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2,
    CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0,
    MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=0
id: 4,229
repository_name: AltSchool/dynamic-rest
file_path: AltSchool_dynamic-rest/tests/viewsets.py
class_name: tests.viewsets.CatViewSet
human_written_code:

    class CatViewSet(DynamicModelViewSet):
        serializer_class = CatSerializer
        queryset = Cat.objects.all()

class_skeleton:

    class CatViewSet(DynamicModelViewSet):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=27,
    CountLine=3, CountLineBlank=0, CountLineCode=3, CountLineCodeDecl=3, CountLineCodeExe=2,
    CountLineComment=0, CountStmt=3, CountStmtDecl=3, CountStmtExe=2, MaxCyclomatic=0,
    MaxInheritanceTree=3, MaxNesting=0, SumCyclomatic=0
id: 4,230
repository_name: AltSchool/dynamic-rest
file_path: AltSchool_dynamic-rest/benchmarks/models.py
class_name: benchmarks.models.Group
human_written_code:

    class Group(models.Model):
        name = models.TextField()
        max_size = models.PositiveIntegerField()
        permissions = models.ManyToManyField('Permission', related_name='groups')
        created = models.DateTimeField(auto_now_add=True)
        updated = models.DateTimeField(auto_now=True)

class_skeleton:

    class Group(models.Model):
        pass

metrics: total_program_units=1, total_doc_str=0, AvgCountLine=0, AvgCountLineBlank=0,
    AvgCountLineCode=0, AvgCountLineComment=0, AvgCyclomatic=0, CommentToCodeRatio=0,
    CountClassBase=1, CountClassCoupled=0, CountClassCoupledModified=0, CountClassDerived=0,
    CountDeclInstanceMethod=0, CountDeclInstanceVariable=0, CountDeclMethod=0, CountDeclMethodAll=0,
    CountLine=6, CountLineBlank=0, CountLineCode=6, CountLineCodeDecl=6, CountLineCodeExe=5,
    CountLineComment=0, CountStmt=6, CountStmtDecl=6, CountStmtExe=5, MaxCyclomatic=0,
    MaxInheritanceTree=1, MaxNesting=0, SumCyclomatic=0
id: 4,231
repository_name: AltSchool/dynamic-rest
file_path: AltSchool_dynamic-rest/tests/test_viewsets.py
class_name: tests.test_viewsets.TestMergeDictConvertsToDict
human_written_code:

    class TestMergeDictConvertsToDict(TestCase):

        def setUp(self):
            self.fixture = create_fixture()
            self.view = GroupNoMergeDictViewSet.as_view({'post': 'create'})
            self.rf = RequestFactory()

        def test_merge_dict_request(self):
            data = {
                'name': 'miao',
                'random_input': [1, 2, 3]
            }
            # Django test submits data as multipart-form by default,
            # which results in request.data being a MergeDict.
            # Wrote UserNoMergeDictViewSet to raise an exception (return 400)
            # if request.data ends up as MergeDict, is not a dict, or
            # is a dict of lists.
            request = self.rf.post('/groups/', data)
            try:
                response = self.view(request)
                self.assertEqual(response.status_code, 201)
            except NotImplementedError as e:
                message = '{0}'.format(e)
                if 'request.FILES' not in message:
                    self.fail('Unexpected error: %s' % message)

class_skeleton:

    class TestMergeDictConvertsToDict(TestCase):
        def setUp(self):
            pass
        def test_merge_dict_request(self):
            pass

metrics: total_program_units=3, total_doc_str=0, AvgCountLine=11, AvgCountLineBlank=0,
    AvgCountLineCode=9, AvgCountLineComment=3, AvgCyclomatic=2, CommentToCodeRatio=0.28,
    CountClassBase=1, CountClassCoupled=2, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=2, CountDeclInstanceVariable=3, CountDeclMethod=2, CountDeclMethodAll=2,
    CountLine=25, CountLineBlank=2, CountLineCode=18, CountLineCodeDecl=11, CountLineCodeExe=15,
    CountLineComment=5, CountStmt=15, CountStmtDecl=10, CountStmtExe=12, MaxCyclomatic=3,
    MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=4
id: 4,232
repository_name: AltSchool/dynamic-rest
file_path: AltSchool_dynamic-rest/tests/test_viewsets.py
class_name: tests.test_viewsets.BulkUpdateTestCase
human_written_code:

    class BulkUpdateTestCase(TestCase):

        def setUp(self):
            self.fixture = create_fixture()

        def test_bulk_update_default_style(self):
            '''
            Test that PATCH request partially updates all submitted resources.
            '''
            data = [{'id': 1, 'fur': 'grey'}, {'id': 2, 'fur': 'grey'}]
            response = self.client.patch(
                '/dogs/',
                json.dumps(data),
                content_type='application/json'
            )
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            self.assertTrue('dogs' in response.data)
            self.assertTrue(2, len(response.data['dogs']))
            self.assertTrue(
                all([Dog.objects.get(id=pk).fur_color == 'grey'
                     for pk in (1, 2)])
            )

        def test_bulk_update_drest_style(self):
            # test to make sure both string '2' and integer 1 resolve correctly
            data = {'dogs': [{'id': 1, 'fur': 'grey'}, {'id': '2', 'fur': 'grey'}]}
            response = self.client.patch(
                '/dogs/',
                json.dumps(data),
                content_type='application/json'
            )
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            self.assertTrue('dogs' in response.data)

        def test_bulk_update_with_filter(self):
            '''
            Test that you can patch inside of the filtered queryset.
            '''
            data = [{'id': 3, 'fur': 'gold'}]
            response = self.client.patch(
                '/dogs/?filter{fur.contains}=brown',
                json.dumps(data),
                content_type='application/json'
            )
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            self.assertTrue(Dog.objects.get(id=3).fur_color == 'gold')

        def test_bulk_update_fail_without_query_param(self):
            '''
            Test that an update-all PATCH request will fail if not
            explicitly using update-all syntax
            '''
            for data in [{'fur': 'grey'}], []:
                response = self.client.patch(
                    '/dogs/?filter{fur.contains}=brown',
                    json.dumps(data),
                    content_type='application/json'
                )
                self.assertEqual(response.status_code,
                                 status.HTTP_400_BAD_REQUEST)

        def test_patch_all_validation(self):
            # wrong format
            data = [{'fur': 'grey'}]
            response = self.client.patch(
                '/dogs/?patch-all=true',
                json.dumps(data),
                content_type='application/json'
            )
            self.assertEqual(
                response.status_code,
                status.HTTP_400_BAD_REQUEST
            )
            # wrong field
            data = {'fury': 'grey'}
            response = self.client.patch(
                '/dogs/?patch-all=true',
                json.dumps(data),
                content_type='application/json'
            )
            self.assertEqual(
                response.status_code,
                status.HTTP_400_BAD_REQUEST
            )
            self.assertTrue('fury' in response.content.decode('utf-8'))
            # non-source field
            data = {'is_red': True, 'fur': 'red'}
            response = self.client.patch(
                '/dogs/?patch-all=true',
                json.dumps(data),
                content_type='application/json'
            )
            self.assertEqual(
                response.status_code,
                status.HTTP_400_BAD_REQUEST
            )
            self.assertTrue('is_red' in response.content.decode('utf-8'))

        def test_patch_all(self):
            # the correct format for a patch-all request
            data = {'fur': 'grey'}
            response = self.client.patch(
                '/dogs/?patch-all=true',
                json.dumps(data),
                content_type='application/json'
            )
            self.assertEqual(
                response.status_code,
                status.HTTP_200_OK,
                response.content
            )
            content = json.loads(response.content.decode('utf-8'))
            num_dogs = Dog.objects.all().count()
            self.assertEqual(
                num_dogs,
                content['meta']['updated']
            )
            self.assertEqual(
                num_dogs,
                Dog.objects.filter(fur_color='grey').count(),
            )

class_skeleton:

    class BulkUpdateTestCase(TestCase):
        def setUp(self):
            pass
        def test_bulk_update_default_style(self):
            '''
            Test that PATCH request partially updates all submitted resources.
            '''
            pass
        def test_bulk_update_drest_style(self):
            pass
        def test_bulk_update_with_filter(self):
            '''
            Test that you can patch inside of the filtered queryset.
            '''
            pass
        def test_bulk_update_fail_without_query_param(self):
            '''
            Test that an update-all PATCH request will fail if not
            explicitly using update-all syntax
            '''
            pass
        def test_patch_all_validation(self):
            pass
        def test_patch_all_validation(self):
            pass

metrics: total_program_units=8, total_doc_str=3, AvgCountLine=15, AvgCountLineBlank=0,
    AvgCountLineCode=13, AvgCountLineComment=2, AvgCyclomatic=1, CommentToCodeRatio=0.16,
    CountClassBase=1, CountClassCoupled=1, CountClassCoupledModified=1, CountClassDerived=0,
    CountDeclInstanceMethod=7, CountDeclInstanceVariable=1, CountDeclMethod=7, CountDeclMethodAll=7,
    CountLine=116, CountLineBlank=9, CountLineCode=92, CountLineCodeDecl=23, CountLineCodeExe=84,
    CountLineComment=15, CountStmt=44, CountStmtDecl=23, CountStmtExe=36, MaxCyclomatic=2,
    MaxInheritanceTree=1, MaxNesting=1, SumCyclomatic=8
4,233
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_viewsets.py
tests.test_viewsets.BulkDeletionTestCase
class BulkDeletionTestCase(TestCase): def setUp(self): self.fixture = create_fixture() self.ids = [i.pk for i in self.fixture.dogs] self.ids_to_delete = self.ids[:2] def test_bulk_delete_default_style(self): data = [{'id': i} for i in self.ids_to_delete] response = self.client.delete( '/dogs/', json.dumps(data), content_type='application/json', ) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual( Dog.objects.filter(id__in=self.ids_to_delete).count(), 0 ) def test_bulk_delete_drest_style(self): data = {'dogs': [{'id': i} for i in self.ids_to_delete]} response = self.client.delete( '/dogs/', json.dumps(data), content_type='application/json', ) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual( Dog.objects.filter(id__in=self.ids_to_delete).count(), 0 ) def test_bulk_delete_single(self): response = self.client.delete( '/dogs/%s' % self.ids_to_delete[0], ) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) def test_bulk_delete_invalid_single(self): data = {"dog": {"id": self.ids_to_delete[0]}} response = self.client.delete( '/dogs/', json.dumps(data), content_type='application/json', ) self.assertEqual( response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED ) def test_bulk_delete_invalid(self): data = {"id": self.ids_to_delete[0]} response = self.client.delete( '/dogs/', json.dumps(data), content_type='application/json', ) self.assertEqual( response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED ) def test_delete_on_nonexistent_raises_404(self): response = self.client.delete( '/dogs/31415' ) self.assertEqual( response.status_code, status.HTTP_404_NOT_FOUND )
class BulkDeletionTestCase(TestCase): def setUp(self): pass def test_bulk_delete_default_style(self): pass def test_bulk_delete_drest_style(self): pass def test_bulk_delete_single(self): pass def test_bulk_delete_invalid_single(self): pass def test_bulk_delete_invalid(self): pass def test_delete_on_nonexistent_raises_404(self): pass
8
0
9
0
9
0
1
0
1
1
1
0
7
3
7
7
71
7
64
21
56
0
29
21
21
1
1
0
7
4,234
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_serializers.py
tests.test_serializers.TestEphemeralSerializer
class TestEphemeralSerializer(TestCase):

    def setUp(self):
        self.fixture = create_fixture()

    def test_data(self):
        location = self.fixture.locations[0]
        data = {}
        data['pk'] = data['id'] = location.pk
        data['location'] = location
        data['groups'] = self.fixture.groups
        instance = EphemeralObject(data)
        data = LocationGroupSerializer(
            instance, envelope=True
        ).data['locationgroup']
        self.assertEqual(
            data, {'id': 1, 'groups': [1, 2], 'location': 1}
        )

    def test_data_count_field(self):
        eo = EphemeralObject({'pk': 1, 'values': [1, 1, 2]})
        data = CountsSerializer(eo, envelope=True).data['counts']
        self.assertEqual(data['count'], 3)
        self.assertEqual(data['unique_count'], 2)

    def test_data_count_field_returns_none_if_null_values(self):
        eo = EphemeralObject({'pk': 1, 'values': None})
        data = CountsSerializer(eo, envelope=True).data['counts']
        self.assertEqual(data['count'], None)
        self.assertEqual(data['unique_count'], None)

    def test_data_count_raises_exception_if_wrong_type(self):
        eo = EphemeralObject({'pk': 1, 'values': {}})
        with self.assertRaises(TypeError):
            CountsSerializer(eo, envelope=True).data

    def test_to_representation_if_id_only(self):
        '''
        Test EphemeralSerializer.to_representation() in id_only mode
        '''
        eo = EphemeralObject({'pk': 1, 'values': None})
        data = CountsSerializer(request_fields=True).to_representation(eo)
        self.assertEqual(data, eo.pk)

    def test_to_representation_request_fields_nested(self):
        value_count = EphemeralObject({'pk': 1, 'values': []})
        nested = EphemeralObject({'pk': 1, 'value_count': value_count})
        data = NestedEphemeralSerializer(
            request_fields={'value_count': {}}).to_representation(nested)
        self.assertEqual(data['value_count']['count'], 0)

    def test_context_nested(self):
        s1 = LocationGroupSerializer(context={'foo': 'bar'})
        s2 = s1.fields['location'].serializer
        self.assertEqual(s2.context['foo'], 'bar')
class TestEphemeralSerializer(TestCase): def setUp(self): pass def test_data(self): pass def test_data_count_field(self): pass def test_data_count_field_returns_none_if_null_values(self): pass def test_data_count_raises_exception_if_wrong_type(self): pass def test_to_representation_if_id_only(self): ''' Test EphemeralSerializer.to_representation() in id_only mode ''' pass def test_to_representation_request_fields_nested(self): pass def test_context_nested(self): pass
9
1
6
0
5
0
1
0.02
1
5
4
0
8
1
8
8
56
11
44
25
35
1
39
25
30
1
1
1
8
4,235
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_serializers.py
tests.test_serializers.TestMeta
class TestMeta(TestCase):

    def test_default_name(self):
        serializer = DogSerializer()
        if hasattr(serializer.Meta, 'name'):
            # bust cached value
            del serializer.Meta.name
        self.assertFalse(hasattr(serializer.Meta, 'name'))
        self.assertEqual('dog', serializer.get_name())

    def test_default_plural_name(self):
        serializer = DogSerializer()
        if hasattr(serializer.Meta, 'plural_name'):
            # bust cached value
            del serializer.Meta.plural_name
        self.assertFalse(hasattr(serializer.Meta, 'plural_name'))
        self.assertEqual('dogs', serializer.get_plural_name())
class TestMeta(TestCase): def test_default_name(self): pass def test_default_plural_name(self): pass
3
0
7
0
6
1
2
0.15
1
1
1
0
2
0
2
2
17
2
13
5
10
2
13
5
10
2
1
1
4
4,236
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_serializers.py
tests.test_serializers.TestListSerializer
class TestListSerializer(TestCase):

    def test_get_name_proxies_to_child(self):
        serializer = UserSerializer(many=True)
        self.assertTrue(isinstance(serializer, DynamicListSerializer))
        self.assertEqual(serializer.get_name(), 'user')
        self.assertEqual(serializer.get_plural_name(), 'users')
class TestListSerializer(TestCase): def test_get_name_proxies_to_child(self): pass
2
0
5
0
5
0
1
0
1
2
2
0
1
0
1
1
7
1
6
3
4
0
6
3
4
1
1
0
1
4,237
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_prefetch2.py
tests.test_prefetch2.TestFastQuery
class TestFastQuery(APITestCase):

    def setUp(self):
        self.fixture = create_fixture()

    def _user_keys(self):
        return set([
            'last_name',
            'name',
            'favorite_pet_id',
            'date_of_birth',
            'favorite_pet_type_id',
            'location_id',
            'id',
            'is_dead',
        ])

    def test_fk_prefetch(self):
        with self.assertNumQueries(2):
            q = FastQuery(User.objects.all())
            q.prefetch_related(
                FastPrefetch(
                    'location',
                    Location.objects.all()
                )
            )
            result = q.execute()

        self.assertTrue(
            all([_['location'] for _ in result])
        )
        self.assertEqual(
            set(['blob', 'id', 'name']),
            set(result[0]['location'].keys())
        )

    def test_m2m_prefetch(self):
        with self.assertNumQueries(3):
            q = FastQuery(User.objects.all())
            q.prefetch_related(
                FastPrefetch(
                    'groups',
                    Group.objects.all()
                )
            )
            result = q.execute()

        self.assertTrue(
            all([_['groups'] for _ in result])
        )
        self.assertTrue(
            isinstance(result[0]['groups'], list)
        )
        self.assertEqual(
            set(['id', 'name']),
            set(result[0]['groups'][0].keys())
        )

    def test_o2o_prefetch(self):
        # Create profiles
        for i in range(1, 4):
            Profile.objects.create(
                user=User.objects.get(pk=i),
                display_name='User %s' % i
            )

        with self.assertNumQueries(2):
            q = FastQuery(Profile.objects.all())
            q.prefetch_related(
                FastPrefetch(
                    'user',
                    User.objects.all()
                )
            )
            result = q.execute()

        self.assertTrue(
            all([_['user'] for _ in result])
        )
        self.assertEqual(
            self._user_keys(),
            set(result[0]['user'].keys())
        )

    def test_reverse_o2o_prefetch(self):
        # Create profiles
        for i in range(1, 4):
            Profile.objects.create(
                user=User.objects.get(pk=i),
                display_name='User %s' % i
            )

        with self.assertNumQueries(2):
            q = FastQuery(User.objects.all())
            q.prefetch_related(
                FastPrefetch(
                    'profile',
                    Profile.objects.all()
                )
            )
            result = q.execute()

        self.assertTrue(
            all(['profile' in _ for _ in result])
        )
        user = sorted(
            result, key=lambda x: 1 if x['profile'] is None else 0
        )[0]
        self.assertEqual(
            set(['display_name', 'user_id', 'id', 'thumbnail_url']),
            set(user['profile'].keys())
        )

    def test_m2o_prefetch(self):
        with self.assertNumQueries(2):
            q = FastQuery(Location.objects.all())
            q.prefetch_related(
                FastPrefetch(
                    'user_set',
                    User.objects.all()
                )
            )
            result = q.execute()

        self.assertTrue(
            all(['user_set' in obj for obj in result])
        )
        location = six.next((
            o for o in result if o['user_set'] and len(o['user_set']) > 1
        ))
        self.assertIsNotNone(location)
        self.assertEqual(
            self._user_keys(),
            set(location['user_set'][0].keys())
        )

    def test_pagination(self):
        r = list(FastQuery(User.objects.all()))
        self.assertTrue(isinstance(r, list))

        r = FastQuery(User.objects.order_by('id'))[1]
        self.assertEqual(1, len(r))
        self.assertEqual(r[0]['id'], 2)

        r = FastQuery(User.objects.order_by('id'))[1:3]
        self.assertEqual(2, len(r))
        self.assertEqual(r[0]['id'], 2)
        self.assertEqual(r[1]['id'], 3)

        with self.assertRaises(TypeError):
            FastQuery(User.objects.all())[:10:2]

    def test_nested_prefetch_by_string(self):
        q = FastQuery(Location.objects.filter(pk=1))
        q.prefetch_related('user_set__groups')
        out = list(q)
        self.assertTrue('user_set' in out[0])
        self.assertTrue('groups' in out[0]['user_set'][0])

    def test_get_with_prefetch(self):
        # FastQuery.get() should apply prefetch filters correctly
        self.assertTrue(
            Cat.objects.filter(home=1, backup_home=3).exists()
        )
        q = FastQuery(Location.objects.all())
        q.prefetch_related(
            FastPrefetch(
                'friendly_cats',
                Cat.objects.filter(home__gt=1)
            )
        )
        obj = q.get(pk=3)
        self.assertEqual(0, obj.friendly_cats.count())

    def test_first_with_prefetch(self):
        # FastQuery.filter() should apply prefetch filters correctly
        self.assertTrue(
            Cat.objects.filter(home=1, backup_home=3).exists()
        )
        q = FastQuery(Location.objects.all())
        q = q.filter(pk=3)
        q.prefetch_related(
            FastPrefetch(
                'friendly_cats',
                Cat.objects.filter(home__gt=1)
            )
        )
        obj = q.first()
        self.assertEqual(0, obj.friendly_cats.count())
class TestFastQuery(APITestCase): def setUp(self): pass def _user_keys(self): pass def test_fk_prefetch(self): pass def test_m2m_prefetch(self): pass def test_o2o_prefetch(self): pass def test_reverse_o2o_prefetch(self): pass def test_m2o_prefetch(self): pass def test_pagination(self): pass def test_nested_prefetch_by_string(self): pass def test_get_with_prefetch(self): pass def test_first_with_prefetch(self): pass
12
0
16
1
15
0
1
0.02
1
11
7
0
11
1
11
11
193
24
165
34
153
4
79
34
67
2
1
1
13
4,238
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/models.py
tests.models.Cat
class Cat(models.Model):
    name = models.TextField()
    home = models.ForeignKey('Location', on_delete=models.CASCADE)
    backup_home = models.ForeignKey(
        'Location',
        related_name='friendly_cats',
        on_delete=models.CASCADE
    )
    hunting_grounds = models.ManyToManyField(
        'Location',
        related_name='annoying_cats',
        related_query_name='getoffmylawn'
    )
    parent = models.ForeignKey(
        'Cat',
        null=True,
        blank=True,
        related_name='kittens',
        on_delete=models.CASCADE
    )
class Cat(models.Model): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
20
0
20
6
19
0
6
6
5
0
1
0
0
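As a quick reference, here is a sketch of the reverse accessors these related_name settings create (assuming migrated models with existing rows; the names mirror the prefetch tests above):

location = Location.objects.first()
location.cat_set.all()         # cats via `home` (no related_name given)
location.friendly_cats.all()   # cats via `backup_home`
location.annoying_cats.all()   # cats via `hunting_grounds` (m2m)
Location.objects.filter(getoffmylawn__name='Tom')  # related_query_name

cat = Cat.objects.first()
cat.kittens.all()              # reverse side of the self-FK `parent`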
4,239
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/viewsets.py
tests.viewsets.HorseViewSet
class HorseViewSet(DynamicModelViewSet):
    features = (DynamicModelViewSet.SORT,)
    model = Horse
    serializer_class = HorseSerializer
    queryset = Horse.objects.all()
    ordering_fields = ('name',)
    ordering = ('-name',)
class HorseViewSet(DynamicModelViewSet): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
27
7
0
7
7
6
0
7
7
6
0
3
0
0
4,240
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/viewsets.py
tests.viewsets.GroupViewSet
class GroupViewSet(DynamicModelViewSet):
    features = (
        DynamicModelViewSet.INCLUDE,
        DynamicModelViewSet.EXCLUDE,
        DynamicModelViewSet.FILTER,
        DynamicModelViewSet.SORT,
    )
    model = Group
    serializer_class = GroupSerializer
    queryset = Group.objects.all()
class GroupViewSet(DynamicModelViewSet): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
27
8
0
8
5
7
0
5
5
4
0
3
0
0
4,241
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/viewsets.py
tests.viewsets.GroupNoMergeDictViewSet
class GroupNoMergeDictViewSet(DynamicModelViewSet):
    model = Group
    serializer_class = GroupSerializer
    queryset = Group.objects.all()

    def create(self, request, *args, **kwargs):
        response = super(GroupNoMergeDictViewSet, self).create(
            request,
            *args,
            **kwargs
        )
        if hasattr(request, 'data'):
            try:
                # Django<1.9, DRF<3.2
                from django.utils.datastructures import MergeDict
                if isinstance(request.data, MergeDict):
                    raise exceptions.ValidationError(
                        "request.data is MergeDict"
                    )
                elif not isinstance(request.data, dict):
                    raise exceptions.ValidationError(
                        "request.data is not a dict"
                    )
            except BaseException:
                pass

        return response
class GroupNoMergeDictViewSet(DynamicModelViewSet): def create(self, request, *args, **kwargs): pass
2
0
22
1
20
1
5
0.04
1
3
0
0
1
0
1
28
27
2
24
7
21
1
15
7
12
5
3
3
5
4,242
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_viewsets.py
tests.test_viewsets.BulkCreationTestCase
class BulkCreationTestCase(TestCase):

    def test_post_single(self):
        """
        Test that POST request with single resource only creates a single
        resource.
        """
        data = {'name': 'foo', 'random_input': [1, 2, 3]}
        response = self.client.post(
            '/groups/', json.dumps(data), content_type='application/json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(1, Group.objects.all().count())

    def test_post_bulk_from_resource_plural_name(self):
        data = {
            'groups': [
                {'name': 'foo', 'random_input': [1, 2, 3]},
                {'name': 'bar', 'random_input': [4, 5, 6]},
            ]
        }
        response = self.client.post(
            '/groups/',
            json.dumps(data),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(2, Group.objects.all().count())

    def test_post_bulk_from_list(self):
        """
        Test that POST request with multiple resources creates all posted
        resources.
        """
        data = [
            {
                'name': 'foo',
                'random_input': [1, 2, 3],
            },
            {
                'name': 'bar',
                'random_input': [4, 5, 6],
            }
        ]
        response = self.client.post(
            '/groups/',
            json.dumps(data),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(2, Group.objects.all().count())
        self.assertEqual(
            ['foo', 'bar'],
            list(Group.objects.all().values_list('name', flat=True))
        )

    def test_post_bulk_with_existing_items_and_disabled_partial_creation(
            self):
        data = [{'name': 'foo'}, {'name': 'bar'}]
        Group.objects.create(name='foo')
        response = self.client.post(
            '/groups/',
            json.dumps(data),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(1, Group.objects.all().count())
        self.assertTrue('errors' in response.data)

    def test_post_bulk_with_sideloaded_results(self):
        u1 = User.objects.create(name='foo', last_name='bar')
        u2 = User.objects.create(name='foo', last_name='baz')
        data = [
            {
                'name': 'foo',
                'members': [u1.pk],
            },
            {
                'name': 'bar',
                'members': [u2.pk],
            }
        ]
        response = self.client.post(
            '/groups/?include[]=members.',
            json.dumps(data),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        resp_data = response.data

        # Check top-level keys
        self.assertEqual(
            set(['users', 'groups']),
            set(resp_data.keys())
        )

        # Should be 2 of each
        self.assertEqual(2, len(resp_data['users']))
        self.assertEqual(2, len(resp_data['groups']))
class BulkCreationTestCase(TestCase): def test_post_single(self): ''' Test that POST request with single resource only creates a single resource. ''' pass def test_post_bulk_from_resource_plural_name(self): pass def test_post_bulk_from_list(self): ''' Test that POST request with multiple resources creates all posted resources. ''' pass def test_post_bulk_with_existing_items_and_disabled_partial_creation(self): pass def test_post_bulk_with_sideloaded_results(self): pass
6
2
18
0
16
2
1
0.13
1
4
2
0
5
0
5
5
96
7
79
19
73
10
34
19
28
1
1
0
5
4,243
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_router.py
tests.test_router.TestDynamicRouter
class TestDynamicRouter(APITestCase):

    def test_get_canonical_path(self):
        rsrc_key = DogSerializer().get_resource_key()
        self.assertEqual(
            '/dogs',
            DynamicRouter.get_canonical_path(rsrc_key)
        )

    def test_get_canonical_path_with_prefix(self):
        set_script_prefix('/v2/')
        rsrc_key = DogSerializer().get_resource_key()
        self.assertEqual(
            '/v2/dogs',
            DynamicRouter.get_canonical_path(rsrc_key)
        )
        clear_script_prefix()

    def test_get_canonical_path_with_pk(self):
        rsrc_key = DogSerializer().get_resource_key()
        self.assertEqual(
            '/dogs/1/',
            DynamicRouter.get_canonical_path(rsrc_key, pk='1')
        )

    def test_get_canonical_path_with_keyspace(self):
        rsrc_key = CatSerializer().get_resource_key()
        self.assertEqual(
            '/v2/cats',
            DynamicRouter.get_canonical_path(rsrc_key)
        )

    def test_get_canonical_serializer(self):
        rsrc_key = get_model_table(Dog)
        self.assertEqual(
            DogSerializer,
            DynamicRouter.get_canonical_serializer(rsrc_key)
        )

    def test_get_canonical_serializer_by_model(self):
        self.assertEqual(
            DogSerializer,
            DynamicRouter.get_canonical_serializer(None, model=Dog)
        )

    def test_get_canonical_serializer_by_instance(self):
        dog = Dog.objects.create(
            name='Snoopy',
            fur_color='black and white',
            origin=''
        )
        self.assertEqual(
            DogSerializer,
            DynamicRouter.get_canonical_serializer(None, instance=dog)
        )

    def test_rest_framework_router_unmodified(self):
        if hasattr(self, 'assertCountEqual'):
            method = self.assertCountEqual
        else:
            method = self.assertItemsEqual

        method(
            [
                {
                    'post': 'create',
                    'get': 'list'
                },
                {
                    'put': 'update',
                    'patch': 'partial_update',
                    'delete': 'destroy',
                    'get': 'retrieve'
                }
            ],
            [
                route.mapping for route in DefaultRouter.routes
                if isinstance(route, Route)
            ]
        )
class TestDynamicRouter(APITestCase): def test_get_canonical_path(self): pass def test_get_canonical_path_with_prefix(self): pass def test_get_canonical_path_with_pk(self): pass def test_get_canonical_path_with_keyspace(self): pass def test_get_canonical_serializer(self): pass def test_get_canonical_serializer_by_model(self): pass def test_get_canonical_serializer_by_instance(self): pass def test_rest_framework_router_unmodified(self): pass
9
0
9
0
9
0
1
0
1
4
4
0
8
0
8
8
80
9
71
16
62
0
28
16
19
2
1
1
9
4,244
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/viewsets.py
dynamic_rest.viewsets.WithDynamicViewSetMixin
class WithDynamicViewSetMixin(object):
    """A viewset that can support dynamic API features.

    Attributes:
      features: A list of features supported by the viewset.
      meta: Extra data that is added to the response by the DynamicRenderer.
    """

    DEBUG = 'debug'
    SIDELOADING = 'sideloading'
    PATCH_ALL = 'patch-all'
    INCLUDE = 'include[]'
    EXCLUDE = 'exclude[]'
    FILTER = 'filter{}'
    SORT = 'sort[]'
    PAGE = settings.PAGE_QUERY_PARAM
    PER_PAGE = settings.PAGE_SIZE_QUERY_PARAM

    # TODO: add support for `sort{}`
    pagination_class = DynamicPageNumberPagination
    metadata_class = DynamicMetadata
    features = (
        DEBUG, INCLUDE, EXCLUDE, FILTER, PAGE, PER_PAGE, SORT,
        SIDELOADING, PATCH_ALL,
    )
    meta = None
    filter_backends = (DynamicFilterBackend, DynamicSortingFilter)

    def initialize_request(self, request, *args, **kargs):
        """
        Override DRF initialize_request() method to swap request.GET
        (which is aliased by request.query_params) with a mutable instance
        of QueryParams, and to convert request MergeDict to a subclass of
        dict for consistency (MergeDict is not a subclass of dict)
        """

        def handle_encodings(request):
            """
            WSGIRequest does not support Unicode values in the query
            string. WSGIRequest handling has a history of drifting
            behavior between combinations of Python versions, Django
            versions and DRF versions.
            Django changed its QUERY_STRING handling here:
            https://goo.gl/WThXo6. DRF 3.4.7 changed its behavior here:
            https://goo.gl/0ojIIO.
            """
            try:
                return QueryParams(request.GET)
            except UnicodeEncodeError:
                pass

            s = request.environ.get('QUERY_STRING', '')
            try:
                s = s.encode('utf-8')
            except UnicodeDecodeError:
                pass

            return QueryParams(s)

        request.GET = handle_encodings(request)
        request = super(WithDynamicViewSetMixin, self).initialize_request(
            request, *args, **kargs
        )

        try:
            # Django<1.9, DRF<3.2

            # MergeDict doesn't have the same API as dict.
            # Django has deprecated MergeDict and DRF is moving away from
            # using it - thus, we're comfortable replacing it with a
            # QueryDict. This will allow the data property to have normal
            # dict methods.
            from django.utils.datastructures import MergeDict
            if isinstance(request._full_data, MergeDict):
                data_as_dict = request.data.dicts[0]
                for d in request.data.dicts[1:]:
                    data_as_dict.update(d)
                request._full_data = data_as_dict
        except BaseException:
            pass

        return request

    def get_renderers(self):
        """Optionally block Browsable API rendering. """
        renderers = super(WithDynamicViewSetMixin, self).get_renderers()
        if settings.ENABLE_BROWSABLE_API is False:
            return [
                r for r in renderers
                if not isinstance(r, BrowsableAPIRenderer)
            ]
        else:
            return renderers

    def get_request_feature(self, name, raw=False):
        """Parses the request for a particular feature.

        Arguments:
            name: A feature name.

        Returns:
            A feature parsed from the URL if the feature is supported, or
            None.
        """
        if '[]' in name:
            # array-type
            return self.request.query_params.getlist(
                name) if name in self.features else None
        elif '{}' in name:
            # object-type (keys are not consistent)
            return self._extract_object_params(
                name, raw=raw) if name in self.features else {}
        else:
            # single-type
            return self.request.query_params.get(
                name) if name in self.features else None

    def _extract_object_params(self, name, raw=False):
        """
        Extract object params, return as dict
        """

        params = self.request.query_params.lists()
        params_map = {}
        original_name = name
        prefix = name[:-1]
        offset = len(prefix)
        for name, value in params:
            name_match = name == original_name
            if name_match:
                if raw and value:
                    # filter{} as object
                    return json.loads(value[0])
                else:
                    continue
            if name.startswith(prefix):
                if name.endswith('}'):
                    name = name[offset:-1]
                elif name.endswith('}[]'):
                    # strip off trailing []
                    # this fixes an Ember queryparams issue
                    name = name[offset:-3]
                else:
                    # malformed argument like:
                    # filter{foo=bar
                    raise exceptions.ParseError(
                        '"%s" is not a well-formed filter key.' % name
                    )
            else:
                continue
            params_map[name] = value

        return params_map if not raw else None

    def get_queryset(self, queryset=None):
        """
        Returns a queryset for this request.

        Arguments:
          queryset: Optional root-level queryset.
        """
        serializer = self.get_serializer()
        return getattr(self, 'queryset', serializer.Meta.model.objects.all())

    def get_request_fields(self):
        """Parses the INCLUDE and EXCLUDE features.

        Extracts the dynamic field features from the request parameters
        into a field map that can be passed to a serializer.

        Returns:
          A nested dict mapping serializer keys to
          True (include) or False (exclude).
        """
        if hasattr(self, '_request_fields'):
            return self._request_fields

        include_fields = self.get_request_feature(self.INCLUDE)
        exclude_fields = self.get_request_feature(self.EXCLUDE)
        request_fields = {}
        for fields, include in (
                (include_fields, True),
                (exclude_fields, False)):
            if fields is None:
                continue
            for field in fields:
                field_segments = field.split('.')
                num_segments = len(field_segments)
                current_fields = request_fields
                for i, segment in enumerate(field_segments):
                    last = i == num_segments - 1
                    if segment:
                        if last:
                            current_fields[segment] = include
                        else:
                            if segment not in current_fields:
                                current_fields[segment] = {}
                            current_fields = current_fields[segment]
                    elif not last:
                        # empty segment must be the last segment
                        raise exceptions.ParseError(
                            '"%s" is not a valid field.' % field
                        )

        self._request_fields = request_fields
        return request_fields

    def get_request_patch_all(self):
        patch_all = self.get_request_feature(self.PATCH_ALL)
        if not patch_all:
            return None
        patch_all = patch_all.lower()
        if patch_all == 'query':
            pass
        elif is_truthy(patch_all):
            patch_all = True
        else:
            raise exceptions.ParseError(
                '"%s" is not valid for %s' % (
                    patch_all, self.PATCH_ALL
                )
            )
        return patch_all

    def get_request_debug(self):
        debug = self.get_request_feature(self.DEBUG)
        return is_truthy(debug) if debug is not None else None

    def get_request_sideloading(self):
        sideloading = self.get_request_feature(self.SIDELOADING)
        return is_truthy(sideloading) if sideloading is not None else None

    def is_update(self):
        if (
            self.request and
            self.request.method.upper() in UPDATE_REQUEST_METHODS
        ):
            return True
        else:
            return False

    def is_delete(self):
        if (
            self.request and
            self.request.method.upper() == DELETE_REQUEST_METHOD
        ):
            return True
        else:
            return False

    def get_serializer(self, *args, **kwargs):
        if 'request_fields' not in kwargs:
            kwargs['request_fields'] = self.get_request_fields()
        if 'sideloading' not in kwargs:
            kwargs['sideloading'] = self.get_request_sideloading()
        if 'debug' not in kwargs:
            kwargs['debug'] = self.get_request_debug()
        if 'envelope' not in kwargs:
            kwargs['envelope'] = True
        if self.is_update():
            kwargs['include_fields'] = '*'
        return super(
            WithDynamicViewSetMixin, self
        ).get_serializer(
            *args, **kwargs
        )

    def paginate_queryset(self, *args, **kwargs):
        if self.PAGE in self.features:
            # make sure pagination is enabled
            if (
                self.PER_PAGE not in self.features and
                self.PER_PAGE in self.request.query_params
            ):
                # remove per_page if it is disabled
                self.request.query_params[self.PER_PAGE] = None
            return super(
                WithDynamicViewSetMixin, self
            ).paginate_queryset(
                *args, **kwargs
            )
        return None

    def _prefix_inex_params(self, request, feature, prefix):
        values = self.get_request_feature(feature)
        if not values:
            return
        del request.query_params[feature]
        request.query_params.add(
            feature,
            [prefix + val for val in values]
        )

    def list_related(self, request, pk=None, field_name=None):
        """Fetch related object(s), as if sideloaded (used to support
        link objects).

        This method gets mapped to `/<resource>/<pk>/<field_name>/` by
        DynamicRouter for all DynamicRelationField fields. Generally,
        this method probably shouldn't be overridden.

        An alternative implementation would be to generate reverse queries.
        For an exploration of that approach, see:
            https://gist.github.com/ryochiji/54687d675978c7d96503
        """

        # Explicitly disable support filtering. Applying filters to this
        # endpoint would require us to pass through sideload filters, which
        # can have unintended consequences when applied asynchronously.
        if self.get_request_feature(self.FILTER):
            raise ValidationError(
                'Filtering is not enabled on relation endpoints.'
            )

        # Prefix include/exclude filters with field_name so it's scoped to
        # the parent object.
        field_prefix = field_name + '.'
        self._prefix_inex_params(request, self.INCLUDE, field_prefix)
        self._prefix_inex_params(request, self.EXCLUDE, field_prefix)

        # Filter for parent object, include related field.
        self.request.query_params.add('filter{pk}', pk)
        self.request.query_params.add(self.INCLUDE, field_prefix)

        # Get serializer and field.
        serializer = self.get_serializer()
        field = serializer.fields.get(field_name)
        if field is None:
            raise ValidationError('Unknown field: "%s".' % field_name)

        # Query for root object, with related field prefetched
        queryset = self.get_queryset()
        queryset = self.filter_queryset(queryset)
        obj = queryset.first()

        if not obj:
            return Response("Not found", status=404)

        # Serialize the related data. Use the field's serializer to ensure
        # it's configured identically to the sideload case. One difference
        # is we need to set `envelope=True` to get the sideload-processor
        # applied.
        related_szr = field.get_serializer(envelope=True)
        try:
            # TODO(ryo): Probably should use field.get_attribute() but that
            #            seems to break a bunch of things. Investigate later.
            related_szr.instance = getattr(obj, field.source)
        except ObjectDoesNotExist:
            # See:
            # http://jsonapi.org/format/#fetching-relationships-responses-404
            # This is a case where the "link URL exists but the
            # relationship is empty" and therefore must return a 200.
            return Response({}, status=200)

        return Response(related_szr.data)

    def get_extra_filters(self, request):
        # Override this method to enable addition of extra filters
        # (i.e., a Q()) so custom filters can be added to the queryset
        # without running into https://code.djangoproject.com/ticket/18437
        # which, without this, would mean that filters added to the
        # queryset after this is called may not behave as expected.
        return None
class WithDynamicViewSetMixin(object): '''A viewset that can support dynamic API features. Attributes: features: A list of features supported by the viewset. meta: Extra data that is added to the response by the DynamicRenderer. ''' def initialize_request(self, request, *args, **kargs): ''' Override DRF initialize_request() method to swap request.GET (which is aliased by request.query_params) with a mutable instance of QueryParams, and to convert request MergeDict to a subclass of dict for consistency (MergeDict is not a subclass of dict) ''' pass def handle_encodings(request): ''' WSGIRequest does not support Unicode values in the query string. WSGIRequest handling has a history of drifting behavior between combinations of Python versions, Django versions and DRF versions. Django changed its QUERY_STRING handling here: https://goo.gl/WThXo6. DRF 3.4.7 changed its behavior here: https://goo.gl/0ojIIO. ''' pass def get_renderers(self): '''Optionally block Browsable API rendering. ''' pass def get_request_feature(self, name, raw=False): '''Parses the request for a particular feature. Arguments: name: A feature name. Returns: A feature parsed from the URL if the feature is supported, or None. ''' pass def _extract_object_params(self, name, raw=False): ''' Extract object params, return as dict ''' pass def get_queryset(self, queryset=None): ''' Returns a queryset for this request. Arguments: queryset: Optional root-level queryset. ''' pass def get_request_fields(self): '''Parses the INCLUDE and EXCLUDE features. Extracts the dynamic field features from the request parameters into a field map that can be passed to a serializer. Returns: A nested dict mapping serializer keys to True (include) or False (exclude). ''' pass def get_request_patch_all(self): pass def get_request_debug(self): pass def get_request_sideloading(self): pass def is_update(self): pass def is_delete(self): pass def get_serializer(self, *args, **kwargs): pass def paginate_queryset(self, *args, **kwargs): pass def _prefix_inex_params(self, request, feature, prefix): pass def list_related(self, request, pk=None, field_name=None): '''Fetch related object(s), as if sideloaded (used to support link objects). This method gets mapped to `/<resource>/<pk>/<field_name>/` by DynamicRouter for all DynamicRelationField fields. Generally, this method probably shouldn't be overridden. An alternative implementation would be to generate reverse queries. For an exploration of that approach, see: https://gist.github.com/ryochiji/54687d675978c7d96503 ''' pass def get_extra_filters(self, request): pass
18
9
20
2
13
5
4
0.39
1
6
1
1
16
1
16
16
369
46
233
66
214
90
169
66
150
10
1
6
63
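For orientation, a minimal sketch of wiring this mixin into a viewset; UserSerializer and User are stand-ins for any serializer/model pair (the viewsets elsewhere in this repo use DynamicModelViewSet, which bundles the mixin):

from rest_framework import viewsets

class UserViewSet(WithDynamicViewSetMixin, viewsets.ModelViewSet):
    serializer_class = UserSerializer
    queryset = User.objects.all()

# A request like
#   GET /users/?include[]=groups.&exclude[]=groups.name&sort[]=-name
# is then parsed by get_request_fields() / get_request_feature() above,
# and the resulting nested field map is handed to the serializer.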
4,245
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/models.py
tests.models.A
class A(models.Model):
    name = models.TextField(blank=True)
class A(models.Model): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
2
0
2
2
1
0
2
2
1
0
1
0
0
4,246
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/viewsets.py
tests.viewsets.AlternateLocationViewSet
class AlternateLocationViewSet(DynamicModelViewSet):
    model = Location
    serializer_class = LocationSerializer
    queryset = Location.objects.all()

    def filter_queryset(self, queryset):
        user_name_separate_filter = self.request.query_params.get(
            'user_name_separate'
        )
        if user_name_separate_filter:
            queryset = queryset.filter(user__name=user_name_separate_filter)
        return super(AlternateLocationViewSet, self).filter_queryset(queryset)

    def get_extra_filters(self, request):
        user_name = request.query_params.get('user_name')
        if user_name:
            return Q(user__name=user_name)
        return None
class AlternateLocationViewSet(DynamicModelViewSet): def filter_queryset(self, queryset): pass def get_extra_filters(self, request): pass
3
0
6
0
6
0
2
0
1
1
0
0
2
0
2
29
18
2
16
8
13
0
14
8
11
2
3
1
4
4,247
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_viewsets.py
tests.test_viewsets.TestUserViewSet
class TestUserViewSet(TestCase):

    def setUp(self):
        self.view = UserViewSet()
        self.rf = RequestFactory()

    def test_get_request_fields(self):
        request = Request(self.rf.get('/users/', {
            'include[]': ['name', 'groups.permissions'],
            'exclude[]': ['groups.name']
        }))
        self.view.request = request
        fields = self.view.get_request_fields()

        self.assertEqual({
            'groups': {
                'name': False,
                'permissions': True
            },
            'name': True
        }, fields)

    def test_get_request_fields_disabled(self):
        self.view.features = (self.view.INCLUDE)
        request = Request(self.rf.get('/users/', {
            'include[]': ['name', 'groups'],
            'exclude[]': ['groups.name']
        }))
        self.view.request = request
        fields = self.view.get_request_fields()

        self.assertEqual({
            'groups': True,
            'name': True
        }, fields)

    def test_get_request_fields_invalid(self):
        for invalid_field in ('groups..name', 'groups..'):
            request = Request(
                self.rf.get('/users/', {'include[]': [invalid_field]}))
            self.view.request = request
            self.assertRaises(
                exceptions.ParseError,
                self.view.get_request_fields)

    def test_filter_extraction(self):
        filters_map = {
            'attr': ['bar'],
            'attr2.eq': ['bar'],
            '-attr3': ['bar'],
            'rel|attr1': ['val'],
            '-rel|attr2': ['val'],
            'rel.attr': ['baz'],
            'rel.bar|attr': ['val'],
            'attr4.lt': ['val'],
            'attr5.in': ['val1', 'val2', 'val3'],
        }
        backend = DynamicFilterBackend()
        out = backend._get_requested_filters(filters_map=filters_map)
        self.assertEqual(out['_include']['attr'].value, 'bar')
        self.assertEqual(out['_include']['attr2'].value, 'bar')
        self.assertEqual(out['_exclude']['attr3'].value, 'bar')
        self.assertEqual(out['rel']['_include']['attr1'].value, 'val')
        self.assertEqual(out['rel']['_exclude']['attr2'].value, 'val')
        self.assertEqual(out['_include']['rel__attr'].value, 'baz')
        self.assertEqual(out['rel']['bar']['_include']['attr'].value, 'val')
        self.assertEqual(out['_include']['attr4__lt'].value, 'val')
        self.assertEqual(len(out['_include']['attr5__in'].value), 3)

    def test_is_null_casting(self):
        filters_map = {
            'f1.isnull': [True],
            'f2.isnull': [['a']],
            'f3.isnull': ['true'],
            'f4.isnull': ['1'],
            'f5.isnull': [1],
            'f6.isnull': [False],
            'f7.isnull': [[]],
            'f8.isnull': ['false'],
            'f9.isnull': ['0'],
            'f10.isnull': [0],
            'f11.isnull': [''],
            'f12.isnull': [None],
        }
        backend = DynamicFilterBackend()
        out = backend._get_requested_filters(filters_map=filters_map)
        self.assertEqual(out['_include']['f1__isnull'].value, True)
        self.assertEqual(out['_include']['f2__isnull'].value, ['a'])
        self.assertEqual(out['_include']['f3__isnull'].value, True)
        self.assertEqual(out['_include']['f4__isnull'].value, True)
        self.assertEqual(out['_include']['f5__isnull'].value, 1)
        self.assertEqual(out['_include']['f6__isnull'].value, False)
        self.assertEqual(out['_include']['f7__isnull'].value, [])
        self.assertEqual(out['_include']['f8__isnull'].value, False)
        self.assertEqual(out['_include']['f9__isnull'].value, False)
        self.assertEqual(out['_include']['f10__isnull'].value, False)
        self.assertEqual(out['_include']['f11__isnull'].value, False)
        self.assertEqual(out['_include']['f12__isnull'].value, None)

    def test_nested_filter_rewrite(self):
        node = FilterNode(['members', 'id'], 'in', [1])
        gs = GroupSerializer(include_fields='*')
        filter_key, field = node.generate_query_key(gs)
        self.assertEqual(filter_key, 'users__id__in')
class TestUserViewSet(TestCase): def setUp(self): pass def test_get_request_fields(self): pass def test_get_request_fields_disabled(self): pass def test_get_request_fields_invalid(self): pass def test_filter_extraction(self): pass def test_is_null_casting(self): pass def test_nested_filter_rewrite(self): pass
8
0
14
1
13
0
1
0
1
4
4
0
7
2
7
7
108
13
95
25
87
0
54
25
46
2
1
1
8
4,248
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/prefetch.py
dynamic_rest.prefetch.FastPrefetch
class FastPrefetch(object):

    def __init__(self, field, queryset=None):
        if isinstance(queryset, models.Manager):
            queryset = queryset.all()
        if isinstance(queryset, QuerySet):
            queryset = FastQuery(queryset)

        assert (queryset is None or isinstance(queryset, FastQuery))

        self.field = field
        self.query = queryset

    @classmethod
    def make_from_field(cls, model=None, field_name=None, field=None):
        assert (model and field_name) or field, (
            'make_from_field required model+field_name or field'
        )

        # For nested prefetch, only handle first level.
        field_parts = field_name.split('__')
        field_name = field_parts[0]
        nested_prefetches = '__'.join(field_parts[1:])

        field, ftype = get_model_field_and_type(model, field_name)
        if not ftype:
            raise RuntimeError("%s is not prefetchable" % field_name)

        qs = get_remote_model(field).objects.all()

        field_name = field_name or field.name
        prefetch = cls(field_name, qs)

        # For nested prefetch, recursively pass down remainder
        if nested_prefetches:
            prefetch.query.prefetch_related(nested_prefetches)

        return prefetch

    @classmethod
    def make_from_prefetch(cls, prefetch, parent_model):
        assert isinstance(prefetch, Prefetch)

        if isinstance(prefetch.queryset, FastQuery):
            return cls(
                prefetch.prefetch_through,
                prefetch.queryset
            )
        else:
            return cls.make_from_field(
                model=parent_model,
                field_name=prefetch.prefetch_through
            )
class FastPrefetch(object): def __init__(self, field, queryset=None): pass @classmethod def make_from_field(cls, model=None, field_name=None, field=None): pass @classmethod def make_from_prefetch(cls, prefetch, parent_model): pass
6
0
16
3
12
1
3
0.05
1
2
1
0
1
2
3
3
52
11
39
13
33
2
28
11
24
3
1
1
8
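A minimal usage sketch, mirroring the TestFastQuery cases earlier in this section:

# explicit prefetch with a custom queryset
q = FastQuery(User.objects.all())
q.prefetch_related(FastPrefetch('groups', Group.objects.all()))
result = q.execute()  # list of dict-like rows, each with a 'groups' list

# string paths also work; make_from_field() splits on '__' and builds
# the nested prefetches recursively
FastQuery(Location.objects.filter(pk=1)).prefetch_related(
    'user_set__groups')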
4,249
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/prefetch.py
dynamic_rest.prefetch.FastObject
class FastObject(dict):

    def __init__(self, *args, **kwargs):
        self.pk_field = kwargs.pop('pk_field', 'id')
        return super(FastObject, self).__init__(*args)

    @property
    def pk(self):
        return self[self.pk_field]

    def _slow_getattr(self, name):
        if '.' in name:
            parts = name.split('.')
            obj = self
            for part in parts:
                obj = obj[part]
            return obj
        elif name == '*':
            return self
        else:
            raise AttributeError(name)

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            # Fast approach failed, fall back on slower logic.
            return self._slow_getattr(name)

    def __setattr__(self, name, value):
        if name != 'pk_field' and name != 'pk':
            self[name] = value
        else:
            super(FastObject, self).__setattr__(name, value)
class FastObject(dict): def __init__(self, *args, **kwargs): pass @property def pk(self): pass def _slow_getattr(self, name): pass def __getattr__(self, name): pass def __setattr__(self, name, value): pass
7
0
5
0
5
0
2
0.04
1
3
0
0
5
1
5
32
34
5
28
11
21
1
24
10
18
4
2
2
10
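A short sketch of the access patterns FastObject supports (values are illustrative):

obj = FastObject({'id': 1, 'location': {'name': 'here'}}, pk_field='id')
obj.pk                          # -> 1, routed through pk_field
obj.id                          # -> 1, plain key access via __getattr__
getattr(obj, 'location.name')   # -> 'here', dotted path via _slow_getattr
obj.name = 'x'                  # stored as obj['name'] by __setattr__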
4,250
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/prefetch.py
dynamic_rest.prefetch.FastList
class FastList(list):
    # shim for related m2m record sets
    def all(self):
        return self
class FastList(list): def all(self): pass
2
0
2
0
2
0
1
0.33
1
0
0
0
1
0
1
34
4
0
3
2
1
1
3
2
1
1
2
0
1
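The no-op all() is what lets prefetched many-to-many results be consumed by code written against Django managers, e.g.:

groups = FastList([{'id': 1}, {'id': 2}])
for group in groups.all():  # queryset-style iteration over a plain list
    print(group['id'])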
4,251
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/paginator.py
dynamic_rest.paginator.DynamicPaginator
class DynamicPaginator(Paginator):

    def __init__(self, *args, **kwargs):
        self.exclude_count = kwargs.pop('exclude_count', False)
        super().__init__(*args, **kwargs)

    def validate_number(self, number):
        """Validate the given 1-based page number."""
        try:
            number = int(number)
        except (TypeError, ValueError):
            raise PageNotAnInteger(_('That page number is not an integer'))
        if number < 1:
            raise EmptyPage(_('That page number is less than 1'))
        if self.exclude_count:
            # skip validating against num_pages
            return number
        if number > self.num_pages:
            if number == 1 and self.allow_empty_first_page:
                pass
            else:
                raise EmptyPage(_('That page contains no results'))
        return number

    def page(self, number):
        """Return a Page object for the given 1-based page number."""
        number = self.validate_number(number)
        bottom = (number - 1) * self.per_page
        top = bottom + self.per_page
        if self.exclude_count:
            # always fetch one extra item
            # to determine if more pages are available
            # and skip validation against count
            top = top + 1
        else:
            if top + self.orphans >= self.count:
                top = self.count
        return self._get_page(self.object_list[bottom:top], number, self)

    @cached_property
    def count(self):
        """Return the total number of objects, across all pages."""
        if self.exclude_count:
            # always return 0, count should not be called
            return 0

        c = getattr(self.object_list, 'count', None)
        if callable(c) and not inspect.isbuiltin(c) and method_has_no_args(c):
            return c()
        return len(self.object_list)

    @cached_property
    def num_pages(self):
        """Return the total number of pages."""
        if self.exclude_count:
            # always return 1, count should not be called
            return 1

        if self.count == 0 and not self.allow_empty_first_page:
            return 0
        hits = max(1, self.count - self.orphans)
        return int(ceil(hits / float(self.per_page)))
class DynamicPaginator(Paginator): def __init__(self, *args, **kwargs): pass def validate_number(self, number): '''Validate the given 1-based page number.''' pass def page(self, number): '''Return a Page object for the given 1-based page number.''' pass @cached_property def count(self): '''Return the total number of objects, across all pages.''' pass @cached_property def num_pages(self): '''Return the total number of pages.''' pass
8
4
11
0
8
2
3
0.22
1
5
0
0
5
1
5
5
62
7
45
13
37
10
41
11
35
6
1
2
16
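A brief sketch of the exclude_count trade-off (any model queryset works; Dog is borrowed from the tests above):

paginator = DynamicPaginator(Dog.objects.all(), 10, exclude_count=True)
paginator.count           # -> 0, no COUNT(*) query is issued
paginator.num_pages       # -> 1, likewise
page = paginator.page(2)  # slices per_page + 1 rows to detect a next page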
4,252
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/pagination.py
dynamic_rest.pagination.DynamicPageNumberPagination
class DynamicPageNumberPagination(PageNumberPagination):
    """A subclass of PageNumberPagination.

    Adds support for pagination metadata and overrides for
    pagination query parameters.
    """

    page_size_query_param = settings.PAGE_SIZE_QUERY_PARAM
    exclude_count_query_param = settings.EXCLUDE_COUNT_QUERY_PARAM
    page_query_param = settings.PAGE_QUERY_PARAM
    max_page_size = settings.MAX_PAGE_SIZE
    page_size = settings.PAGE_SIZE or api_settings.PAGE_SIZE
    django_paginator_class = DynamicPaginator

    def get_page_metadata(self):
        # always returns page, per_page
        # also returns total_results and total_pages
        # (unless EXCLUDE_COUNT_QUERY_PARAM is set)
        meta = {
            'page': self.page.number,
            'per_page': self.get_page_size(self.request)
        }
        if not self.exclude_count:
            meta['total_results'] = self.page.paginator.count
            meta['total_pages'] = self.page.paginator.num_pages
        else:
            meta['more_pages'] = self.more_pages
        return meta

    def get_paginated_response(self, data):
        meta = self.get_page_metadata()
        result = None
        if isinstance(data, list):
            result = OrderedDict()
            if not self.exclude_count:
                result['count'] = self.page.paginator.count
            result['next'] = self.get_next_link()
            result['previous'] = self.get_previous_link()
            result['results'] = data
            result['meta'] = meta
        else:
            result = data
            if 'meta' in result:
                result['meta'].update(meta)
            else:
                result['meta'] = meta
        return Response(result)

    @cached_property
    def exclude_count(self):
        return self.request.query_params.get(self.exclude_count_query_param)

    def get_page_number(self, request, paginator):
        page_number = request.query_params.get(self.page_query_param, 1)
        if page_number in self.last_page_strings:
            page_number = paginator.num_pages
        return page_number

    def paginate_queryset(self, queryset, request, **other):
        """
        Paginate a queryset if required, either returning a
        page object, or `None` if pagination is not configured for this
        view.
        """
        if 'exclude_count' in self.__dict__:
            self.__dict__.pop('exclude_count')

        page_size = self.get_page_size(request)
        if not page_size:
            return None

        self.request = request

        paginator = self.django_paginator_class(
            queryset, page_size, exclude_count=self.exclude_count
        )
        page_number = self.get_page_number(request, paginator)

        try:
            self.page = paginator.page(page_number)
        except InvalidPage as exc:
            msg = self.invalid_page_message.format(
                page_number=page_number, message=str(exc)
            )
            raise NotFound(msg)

        if paginator.num_pages > 1 and self.template is not None:
            # The browsable API should display pagination controls.
            self.display_page_controls = True

        result = list(self.page)
        if self.exclude_count:
            if len(result) > page_size:
                # if exclude_count is set, we fetch one extra item
                result = result[:page_size]
                self.more_pages = True
            else:
                self.more_pages = False
        return result
class DynamicPageNumberPagination(PageNumberPagination): '''A subclass of PageNumberPagination. Adds support for pagination metadata and overrides for pagination query parameters. ''' def get_page_metadata(self): pass def get_paginated_response(self, data): pass @cached_property def exclude_count(self): pass def get_page_number(self, request, paginator): pass def paginate_queryset(self, queryset, request, **other): ''' Paginate a queryset if required, either returning a page object, or `None` if pagination is not configured for this view. ''' pass
7
2
16
1
13
2
3
0.18
1
3
0
0
5
4
5
5
97
12
72
27
65
13
60
25
54
7
1
2
16
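The envelope produced by get_paginated_response() for a list result looks roughly like this (illustrative values; with exclude_count set, the count fields are dropped and meta carries 'more_pages' instead):

envelope = {
    'count': 25,
    'next': 'http://testserver/dogs/?page=3',
    'previous': 'http://testserver/dogs/?page=1',
    'results': [],  # the serialized page
    'meta': {
        'page': 2,
        'per_page': 10,
        'total_results': 25,
        'total_pages': 3,
    },
}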
4,253
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/metadata.py
dynamic_rest.metadata.DynamicMetadata
class DynamicMetadata(SimpleMetadata):
    """A subclass of SimpleMetadata.

    Adds `properties` and `features` to the metadata.
    """

    def determine_actions(self, request, view):
        """Prevent displaying action-specific details."""
        return None

    def determine_metadata(self, request, view):
        """Adds `properties` and `features` to the metadata response."""
        metadata = super(
            DynamicMetadata, self).determine_metadata(
            request, view)
        metadata['features'] = getattr(view, 'features', [])
        if hasattr(view, 'get_serializer'):
            serializer = view.get_serializer(dynamic=False)
            if hasattr(serializer, 'get_name'):
                metadata['resource_name'] = serializer.get_name()
            if hasattr(serializer, 'get_plural_name'):
                metadata['resource_name_plural'] = serializer.get_plural_name()
        metadata['properties'] = self.get_serializer_info(serializer)
        return metadata

    def get_field_info(self, field):
        """Adds `related_to` and `nullable` to the metadata response."""
        field_info = OrderedDict()
        for attr in ('required', 'read_only', 'default', 'label'):
            field_info[attr] = getattr(field, attr)
        if field_info['default'] is empty:
            field_info['default'] = None
        if hasattr(field, 'immutable'):
            field_info['immutable'] = field.immutable
        field_info['nullable'] = field.allow_null
        if hasattr(field, 'choices'):
            field_info['choices'] = [
                {
                    'value': choice_value,
                    'display_name': force_str(choice_name, strings_only=True)
                }
                for choice_value, choice_name in field.choices.items()
            ]
        many = False
        if isinstance(field, DynamicRelationField):
            field = field.serializer
        if isinstance(field, ListSerializer):
            field = field.child
            many = True
        if isinstance(field, ModelSerializer):
            type = 'many' if many else 'one'
            field_info['related_to'] = field.get_plural_name()
        else:
            type = self.label_lookup[field]

        field_info['type'] = type
        return field_info
class DynamicMetadata(SimpleMetadata): '''A subclass of SimpleMetadata. Adds `properties` and `features` to the metadata. ''' def determine_actions(self, request, view): '''Prevent displaying action-specific details.''' pass def determine_metadata(self, request, view): '''Adds `properties` and `features` to the metadata response.''' pass def get_field_info(self, field): '''Adds `related_to` and `nullable` to the metadata response.''' pass
4
4
17
0
16
1
5
0.13
1
3
1
0
3
0
3
3
59
5
48
10
44
6
37
10
33
9
1
2
14
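A hedged sketch of inspecting this metadata through DRF's test client (the /dogs/ endpoint is assumed from the tests above):

from rest_framework.test import APIClient

client = APIClient()
meta = client.options('/dogs/').data
meta['resource_name']         # -> 'dog', from serializer.get_name()
meta['resource_name_plural']  # -> 'dogs'
meta['features']              # -> the view's feature list
meta['properties']            # -> per-field info, incl. 'nullable'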
4,254
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/filters.py
dynamic_rest.filters.FilterNode
class FilterNode(object):

    def __init__(self, field, operator, value):
        """Create an object representing a filter, to be stored in a
        TreeMap.

        For example, a filter query like `filter{users.events.capacity.lte}=1`
        would be passed into a `FilterNode` as follows:

        ```
            field = ['users', 'events', 'capacity']
            operator = 'lte'
            value = 1
            node = FilterNode(field, operator, value)
        ```

        Arguments:
            field: A list of field parts.
            operator: A valid filter operator, or None.
                Per Django convention, `None` means the equality operator.
            value: The value to filter on.
        """
        self.field = field
        self.operator = operator
        self.value = value

    @property
    def key(self):
        return '%s%s' % (
            '__'.join(self.field),
            '__' + self.operator if self.operator else '',
        )

    def generate_query_key(self, serializer):
        """Get the key that can be passed to Django's filter method.

        To account for serializer field name rewrites, this method
        translates serializer field names to model field names by
        inspecting `serializer`.

        For example, a query like `filter{users.events}` would be
        returned as `users__events`.

        Arguments:
            serializer: A DRF serializer

        Returns:
            A filter key.
        """
        rewritten = []
        last = len(self.field) - 1
        s = serializer
        field = None
        for i, field_name in enumerate(self.field):
            # Note: .fields can be empty for related serializers that
            # aren't sideloaded. Fields that are deferred also won't be
            # present. If field name isn't in serializer.fields, get full
            # list from get_all_fields() method. This is somewhat
            # expensive, so only do this if we have to.
            fields = s.fields
            if field_name not in fields:
                fields = getattr(s, 'get_all_fields', lambda: {})()

            if field_name == 'pk':
                rewritten.append('pk')
                continue

            if field_name not in fields:
                raise ValidationError(
                    "Invalid filter field: %s" % field_name
                )

            field = fields[field_name]

            # For remote fields, strip off '_set' for filtering. This is
            # a weird Django inconsistency.
            model_field_name = field.source or field_name
            model_field = get_model_field(s.get_model(), model_field_name)
            if isinstance(model_field, RelatedObject):
                model_field_name = model_field.field.related_query_name()

            # If get_all_fields() was used above, field could be unbound,
            # and field.source would be None
            rewritten.append(model_field_name)

            if i == last:
                break

            # Recurse into nested field
            s = getattr(field, 'serializer', None)
            if isinstance(s, serializers.ListSerializer):
                s = s.child
            if not s:
                raise ValidationError(
                    "Invalid nested filter field: %s" % field_name
                )

        if self.operator:
            rewritten.append(self.operator)

        return ('__'.join(rewritten), field)
class FilterNode(object): def __init__(self, field, operator, value): '''Create an object representing a filter, to be stored in a TreeMap. For example, a filter query like `filter{users.events.capacity.lte}=1` would be passed into a `FilterNode` as follows: ``` field = ['users', 'events', 'capacity'] operator = 'lte' value = 1 node = FilterNode(field, operator, value) ``` Arguments: field: A list of field parts. operator: A valid filter operator, or None. Per Django convention, `None` means the equality operator. value: The value to filter on. ''' pass @property def key(self): pass def generate_query_key(self, serializer): '''Get the key that can be passed to Django's filter method. To account for serializer field name rewrites, this method translates serializer field names to model field names by inspecting `serializer`. For example, a query like `filter{users.events}` would be returned as `users__events`. Arguments: serializer: A DRF serializer Returns: A filter key. ''' pass
5
2
30
5
13
12
4
0.88
1
1
0
0
3
3
3
3
95
18
41
16
36
36
37
15
33
10
1
2
13
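A worked example of the two keys, taken from the docstring above and from test_nested_filter_rewrite earlier in this section:

node = FilterNode(['users', 'events', 'capacity'], 'lte', 1)
node.key  # -> 'users__events__capacity__lte'

# generate_query_key() additionally rewrites serializer names to model
# names, so a 'members' field sourced from 'users' becomes:
key, field = FilterNode(['members', 'id'], 'in', [1]).generate_query_key(
    GroupSerializer(include_fields='*'))
key  # -> 'users__id__in'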
4,255
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/filters.py
dynamic_rest.filters.FastDynamicFilterBackend
class FastDynamicFilterBackend(DynamicFilterBackend):

    def _create_prefetch(self, source, queryset):
        return FastPrefetch(source, queryset=queryset)

    def _get_queryset(self, queryset=None, serializer=None):
        queryset = super(FastDynamicFilterBackend, self)._get_queryset(
            queryset=queryset, serializer=serializer
        )

        if not isinstance(queryset, FastQuery):
            queryset = FastQuery(queryset)

        return queryset

    def _make_model_queryset(self, model):
        queryset = super(
            FastDynamicFilterBackend, self
        )._make_model_queryset(model)
        return FastQuery(queryset)

    def _serializer_filter(self, serializer=None, queryset=None):
        queryset.queryset = serializer.filter_queryset(queryset.queryset)
        return queryset
class FastDynamicFilterBackend(DynamicFilterBackend): def _create_prefetch(self, source, queryset): pass def _get_queryset(self, queryset=None, serializer=None): pass def _make_model_queryset(self, model): pass def _serializer_filter(self, serializer=None, queryset=None): pass
5
0
4
1
4
0
1
0
1
3
2
0
4
0
4
17
21
5
16
6
11
0
14
6
9
2
2
1
5
4,256
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/filters.py
dynamic_rest.filters.DynamicSortingFilter
class DynamicSortingFilter(OrderingFilter):
    """Subclass of DRF's OrderingFilter.

    This class adds support for multi-field ordering and rewritten fields.
    """

    def filter_queryset(self, request, queryset, view):
        """Filter the queryset, applying the ordering.

        The `ordering_param` can be overwritten here.
        In DRF, the ordering_param is 'ordering', but we support changing
        it to allow the viewset to control the parameter.
        """
        self.ordering_param = view.SORT

        ordering = self.get_ordering(request, queryset, view)
        if ordering:
            queryset = queryset.order_by(*ordering)
            if any(['__' in o for o in ordering]):
                # add distinct() to remove duplicates
                # in case of order-by-related
                queryset = queryset.distinct()
        return queryset

    def get_ordering(self, request, queryset, view):
        """Return an ordering for a given request.

        DRF expects a comma separated list, while DREST expects an array.
        This method overwrites the DRF default so it can parse the array.
        """
        params = view.get_request_feature(view.SORT)
        if params:
            fields = [param.strip() for param in params]
            valid_ordering, invalid_ordering = self.remove_invalid_fields(
                queryset, fields, view
            )

            # if any of the sort fields are invalid, throw an error.
            # else return the ordering
            if invalid_ordering:
                raise ValidationError(
                    "Invalid filter field: %s" % invalid_ordering
                )
            else:
                return valid_ordering

        # No sorting was included
        return self.get_default_ordering(view)

    def remove_invalid_fields(self, queryset, fields, view):
        """Remove invalid fields from an ordering.

        Overwrites the DRF default remove_invalid_fields method to return
        both the valid orderings and any invalid orderings.
        """
        valid_orderings = []
        invalid_orderings = []

        # for each field sent down from the query param,
        # determine if its valid or invalid
        for term in fields:
            stripped_term = term.lstrip('-')
            # add back the '-' at the end if necessary
            reverse_sort_term = '' if len(stripped_term) == len(term) else '-'
            ordering = self.ordering_for(stripped_term, view)

            if ordering:
                valid_orderings.append(reverse_sort_term + ordering)
            else:
                invalid_orderings.append(term)

        return valid_orderings, invalid_orderings

    def ordering_for(self, term, view):
        """
        Return ordering (model field chain) for term (serializer field
        chain) or None if invalid

        Raise ImproperlyConfigured if serializer_class not set on view
        """
        if not self._is_allowed_term(term, view):
            return None

        serializer = self._get_serializer_class(view)()

        serializer_chain = term.split('.')

        model_chain = []

        for segment in serializer_chain[:-1]:
            field = serializer.get_all_fields().get(segment)

            if not (
                field and
                field.source != '*' and
                isinstance(field, DynamicRelationField)
            ):
                return None

            model_chain.append(field.source or segment)

            serializer = field.serializer_class()

        last_segment = serializer_chain[-1]
        last_field = serializer.get_all_fields().get(last_segment)

        if not last_field or last_field.source == '*':
            return None

        model_chain.append(last_field.source or last_segment)

        return '__'.join(model_chain)

    def _is_allowed_term(self, term, view):
        valid_fields = getattr(view, 'ordering_fields', self.ordering_fields)
        all_fields_allowed = valid_fields is None or valid_fields == '__all__'
        return all_fields_allowed or term in valid_fields

    def _get_serializer_class(self, view):
        # prefer the overriding method
        if hasattr(view, 'get_serializer_class'):
            try:
                serializer_class = view.get_serializer_class()
            except AssertionError:
                # Raised by the default implementation if
                # no serializer_class was found
                serializer_class = None
        # use the attribute
        else:
            serializer_class = getattr(view, 'serializer_class', None)

        # neither a method nor an attribute has been specified
        if serializer_class is None:
            msg = (
                "Cannot use %s on a view which does not have either a "
                "'serializer_class' or an overriding "
                "'get_serializer_class'."
            )
            raise ImproperlyConfigured(msg % self.__class__.__name__)

        return serializer_class
class DynamicSortingFilter(OrderingFilter): '''Subclass of DRF's OrderingFilter. This class adds support for multi-field ordering and rewritten fields. ''' def filter_queryset(self, request, queryset, view): '''Filter the queryset, applying the ordering. The `ordering_param` can be overwritten here. In DRF, the ordering_param is 'ordering', but we support changing it to allow the viewset to control the parameter. ''' pass def get_ordering(self, request, queryset, view): '''Return an ordering for a given request. DRF expects a comma separated list, while DREST expects an array. This method overwrites the DRF default so it can parse the array. ''' pass def remove_invalid_fields(self, queryset, fields, view): '''Remove invalid fields from an ordering. Overwrites the DRF default remove_invalid_fields method to return both the valid orderings and any invalid orderings. ''' pass def ordering_for(self, term, view): ''' Return ordering (model field chain) for term (serializer field chain) or None if invalid Raise ImproperlyConfigured if serializer_class not set on view ''' pass def _is_allowed_term(self, term, view): pass def _get_serializer_class(self, view): pass
7
5
21
4
12
5
3
0.46
1
2
1
0
6
1
6
6
140
32
74
29
67
34
62
29
55
5
1
2
20
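In request terms, the sort feature this filter implements behaves roughly as follows (HorseViewSet above declares ordering_fields = ('name',)):

# GET /horses/?sort[]=name       -> ORDER BY name
# GET /horses/?sort[]=-name      -> ORDER BY name DESC
# GET /horses/?sort[]=bad_field  -> 400 "Invalid filter field: ..."
#
# Dotted terms such as sort[]=location.name are rewritten to
# 'location__name' by ordering_for(), provided every intermediate
# segment is a DynamicRelationField on the serializer chain.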
4,257
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/filters.py
dynamic_rest.filters.DynamicFilterBackend
class DynamicFilterBackend(BaseFilterBackend):
    """A DRF filter backend that constructs DREST querysets.

    This backend is responsible for interpreting and applying
    filters, includes, and excludes to the base queryset of a view.

    Attributes:
        VALID_FILTER_OPERATORS: A list of filter operators.
    """

    VALID_FILTER_OPERATORS = (
        'in',
        'any',
        'all',
        'icontains',
        'contains',
        'startswith',
        'istartswith',
        'endswith',
        'iendswith',
        'year',
        'month',
        'day',
        'week_day',
        'regex',
        'range',
        'gt',
        'lt',
        'gte',
        'lte',
        'isnull',
        'eq',
        'iexact',
        None,
    )

    def filter_queryset(self, request, queryset, view):
        """Filter the queryset.

        This is the main entry-point to this class, and
        is called by DRF's list handler.
        """
        self.request = request
        self.view = view

        # enable addition of extra filters (i.e., a Q())
        # so custom filters can be added to the queryset without
        # running into https://code.djangoproject.com/ticket/18437
        # which, without this, would mean that filters added to the queryset
        # after this is called may not behave as expected
        extra_filters = self.view.get_extra_filters(request)

        disable_prefetches = self.view.is_update()

        self.DEBUG = settings.DEBUG

        return self._build_queryset(
            queryset=queryset,
            extra_filters=extra_filters,
            disable_prefetches=disable_prefetches,
        )

    def _extract_filters(self, **kwargs):
        """Deprecated alias for `_get_requested_filters`.

        The rename broke downstream dependencies that haven't been
        updated to the new naming convention, so this wrapper is kept
        for backwards compatibility.
        """
        return self._get_requested_filters(**kwargs)

    def _get_requested_filters(self, **kwargs):
        """
        Convert 'filters' query params into a dict that can be passed
        to Q. Returns a dict with two fields, 'include' and 'exclude',
        which can be used like:

            result = self._get_requested_filters()
            q = Q(**result['include']) & ~Q(**result['exclude'])
        """

        out = TreeMap()

        filters_map = kwargs.get('filters_map') or self.view.get_request_feature(
            self.view.FILTER
        )

        if getattr(self, 'view', None):
            out['_complex'] = self.view.get_request_feature(
                self.view.FILTER, raw=True
            )

        for spec, value in six.iteritems(filters_map):

            # Inclusion or exclusion?
            if spec[0] == '-':
                spec = spec[1:]
                inex = '_exclude'
            else:
                inex = '_include'

            # for relational filters, separate out relation path part
            if '|' in spec:
                rel, spec = spec.split('|')
                rel = rel.split('.')
            else:
                rel = None

            parts = spec.split('.')

            # Last part could be operator, e.g. "events.capacity.gte"
            if len(parts) > 1 and parts[-1] in self.VALID_FILTER_OPERATORS:
                operator = parts.pop()
            else:
                operator = None

            # All operators except 'range' and 'in' should have one value
            if operator == 'range':
                value = value[:2]
            elif operator == 'in':
                # no-op: i.e. accept `value` as an arbitrarily long list
                pass
            elif operator in self.VALID_FILTER_OPERATORS:
                value = value[0]
                if operator == 'isnull' and isinstance(
                    value, six.string_types
                ):
                    value = is_truthy(value)
                elif operator == 'eq':
                    operator = None

            node = FilterNode(parts, operator, value)

            # insert into output tree
            path = rel if rel else []
            path += [inex, node.key]
            out.insert(path, node)

        return out

    def _filters_to_query(self, filters, serializer, q=None):
        """
        Construct a Django Q object from the request.

        Arguments are dictionaries, which will be passed to Q() as kwargs.

        e.g.
            includes = {'foo': 'bar', 'baz__in': [1, 2]}
        produces:
            Q(foo='bar', baz__in=[1, 2])

        Arguments:
            filters: TreeMap with include/exclude filters OR query map.
            serializer: serializer instance of the top-level object.
            q: Q() object (optional).

        Returns:
            Q() instance, or None if no inclusion or exclusion filters
            were specified.
        """
        if (
            not filters.get('_complex')
        ):
            includes = filters.get('_include')
            excludes = filters.get('_exclude')
            q = q or Q()

            if not includes and not excludes:
                return None

            if includes:
                includes = rewrite_filters(includes, serializer)
                q &= Q(**includes)
            if excludes:
                excludes = rewrite_filters(excludes, serializer)
                for k, v in six.iteritems(excludes):
                    q &= ~Q(**{k: v})
            return q
        else:
            filters = filters.get('_complex')

            ors = filters.get('.or') or filters.get('$or')
            ands = filters.get('.and') or filters.get('$and')

            if q is None:
                q = Q()

            if ors:
                result = reduce(
                    OR,
                    [self._filters_to_query({"_complex": f}, serializer)
                     for f in ors]
                )
                return result

            if ands:
                return reduce(
                    AND,
                    [self._filters_to_query({"_complex": f}, serializer)
                     for f in ands]
                )

            clauses = [
                clause_to_q(clause, serializer)
                for clause in filters.items()
            ]
            return reduce(AND, clauses) if clauses else q

    def _create_prefetch(self, source, queryset):
        return Prefetch(source, queryset=queryset)

    def _build_implicit_prefetches(self, model, prefetches, requirements):
        """Build a prefetch dictionary based on internal requirements."""
        for source, remainder in six.iteritems(requirements):
            if not remainder or isinstance(remainder, six.string_types):
                # no further requirements to prefetch
                continue

            related_field = get_model_field(model, source)
            related_model = get_related_model(related_field)

            queryset = (
                self._build_implicit_queryset(related_model, remainder)
                if related_model else None
            )

            prefetches[source] = self._create_prefetch(source, queryset)

        return prefetches

    def _make_model_queryset(self, model):
        return model.objects.all()

    def _build_implicit_queryset(self, model, requirements):
        """Build a queryset based on implicit requirements."""
        queryset = self._make_model_queryset(model)
        prefetches = {}
        self._build_implicit_prefetches(model, prefetches, requirements)
        prefetch = prefetches.values()
        queryset = queryset.prefetch_related(*prefetch).distinct()
        if self.DEBUG:
            queryset._using_prefetches = prefetches
        return queryset

    def _build_requested_prefetches(
        self, prefetches, requirements, model, fields, filters
    ):
        """Build a prefetch dictionary based on request requirements."""
        for name, field in six.iteritems(fields):
            original_field = field
            if isinstance(field, DynamicRelationField):
                field = field.serializer
            if isinstance(field, serializers.ListSerializer):
                field = field.child
            if not isinstance(field, serializers.ModelSerializer):
                continue

            source = field.source or name
            if '.' in source:
                raise ValidationError(
                    'nested relationship values are not supported'
                )

            if source in prefetches:
                # ignore duplicated sources
                continue

            is_remote = is_field_remote(model, source)
            is_id_only = getattr(field, 'id_only', lambda: False)()
            if is_id_only and not is_remote:
                continue

            related_queryset = getattr(original_field, 'queryset', None)

            if callable(related_queryset):
                related_queryset = related_queryset(field)

            source = field.source or name
            # Popping the source here (during explicit prefetch construction)
            # guarantees that implicitly required prefetches that follow will
            # not conflict.
            required = requirements.pop(source, None)

            prefetch_queryset = self._build_queryset(
                serializer=field,
                filters=filters.get(name, {}),
                queryset=related_queryset,
                requirements=required,
            )

            # Note: There can only be one prefetch per source, even
            #       though there can be multiple fields pointing to
            #       the same source. This could break in some cases,
            #       but is mostly an issue on writes when we use all
            #       fields by default.
            prefetches[source] = self._create_prefetch(
                source, prefetch_queryset
            )

        return prefetches

    def _get_implicit_requirements(self, fields, requirements):
        """Extract internal prefetch requirements from serializer fields."""
        for name, field in six.iteritems(fields):
            source = field.source
            # Requires may be manually set on the field -- if not,
            # assume the field requires only its source.
            requires = getattr(field, 'requires', None) or [source]
            for require in requires:
                if not require:
                    # ignore fields with empty source
                    continue

                requirement = require.split('.')
                if requirement[-1] == '':
                    # Change 'a.b.' -> 'a.b.*',
                    # supporting 'a.b.' for backwards compatibility.
                    requirement[-1] = '*'

                requirements.insert(requirement, TreeMap(), update=True)

    def _get_queryset(self, queryset=None, serializer=None):
        if serializer and queryset is None:
            queryset = serializer.Meta.model.objects

        return queryset

    def _serializer_filter(self, serializer=None, queryset=None):
        return serializer.filter_queryset(queryset)

    def _build_queryset(
        self,
        serializer=None,
        filters=None,
        queryset=None,
        requirements=None,
        extra_filters=None,
        disable_prefetches=False,
    ):
        """Build a queryset that pulls in all data required by this request.

        Handles nested prefetching of related data and deferring fields
        at the queryset level.

        Arguments:
            serializer: An optional serializer to use as a base for the
                queryset. If no serializer is passed, the `get_serializer`
                method will be used to initialize the base serializer for
                the viewset.
            filters: An optional TreeMap of nested filters.
            queryset: An optional base queryset.
            requirements: An optional TreeMap of nested requirements.
        """
        is_root_level = False
        if not serializer:
            serializer = self.view.get_serializer()
            is_root_level = True

        queryset = self._get_queryset(queryset=queryset, serializer=serializer)

        model = getattr(serializer.Meta, 'model', None)

        if not model:
            return queryset

        prefetches = {}

        # build a nested Prefetch queryset
        # based on request parameters and serializer fields
        fields = serializer.fields

        if requirements is None:
            requirements = TreeMap()

        self._get_implicit_requirements(fields, requirements)

        # Implicit requirements (i.e. via `requires`) can potentially
        # include fields that haven't been explicitly included.
        # Such fields would not be in `fields`, so they need to be added.
        implicitly_included = set(requirements.keys()) - set(fields.keys())
        if implicitly_included:
            all_fields = serializer.get_all_fields()
            fields.update(
                {
                    field: all_fields[field]
                    for field in implicitly_included
                    if field in all_fields
                }
            )

        if filters is None:
            filters = self._get_requested_filters()

        # build nested Prefetch queryset
        self._build_requested_prefetches(
            prefetches, requirements, model, fields, filters
        )

        # build remaining prefetches out of internal requirements
        # that are not already covered by request requirements
        self._build_implicit_prefetches(model, prefetches, requirements)

        # use requirements at this level to limit fields selected
        # only do this for GET requests where we are not requesting the
        # entire fieldset
        if (
            '*' not in requirements and
            not self.view.is_update() and
            not self.view.is_delete()
        ):
            id_fields = getattr(serializer, 'get_id_fields', lambda: [])()
            # only include local model fields
            only = [
                field for field in set(
                    id_fields + list(requirements.keys())
                ) if is_model_field(model, field) and
                not is_field_remote(model, field)
            ]
            queryset = queryset.only(*only)

        # add request filters
        query = self._filters_to_query(filters=filters, serializer=serializer)

        # add additional filters specified by calling view
        if extra_filters:
            query = extra_filters if not query else extra_filters & query

        if query:
            # Convert internal django ValidationError to
            # APIException-based one in order to resolve validation error
            # from 500 status code to 400.
            try:
                queryset = queryset.filter(query)
            except InternalValidationError as e:
                raise ValidationError(
                    dict(e) if hasattr(e, 'error_dict') else list(e)
                )
            except Exception as e:
                # Some other Django error in parsing the filter.
                # Very likely a bad query, so throw a ValidationError.
                err_msg = getattr(e, 'message', '')
                raise ValidationError(err_msg)

        # A serializer can have this optional function
        # to dynamically apply additional filters on
        # any queries that will use that serializer.
        # You could use this to have (for example) different
        # serializers for different subsets of a model or to
        # implement permissions which work even in sideloads.
        if hasattr(serializer, 'filter_queryset'):
            queryset = self._serializer_filter(
                serializer=serializer, queryset=queryset
            )

        # add prefetches and remove duplicates if necessary
        prefetch = prefetches.values()
        if prefetch and not disable_prefetches:
            queryset = queryset.prefetch_related(*prefetch)
        elif isinstance(queryset, Manager):
            queryset = queryset.all()
        if has_joins(queryset) or not is_root_level:
            queryset = queryset.distinct()

        if self.DEBUG:
            queryset._using_prefetches = prefetches
        return queryset
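A hedged illustration of what the include/exclude dicts produced above reduce to: once rewritten to ORM syntax, they combine into a single Q object roughly as in this minimal sketch (plain Django; the field names are invented).

# Minimal sketch, assuming plain Django; field names are invented.
from django.db.models import Q

includes = {'events__capacity__gte': 100}  # e.g. ?filter{events.capacity.gte}=100
excludes = {'name__icontains': 'bob'}      # e.g. ?filter{-name.icontains}=bob

q = Q(**includes)
for key, value in excludes.items():
    q &= ~Q(**{key: value})
# queryset.filter(q) would then apply both clauses in one call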
class DynamicFilterBackend(BaseFilterBackend): '''A DRF filter backend that constructs DREST querysets. This backend is responsible for interpretting and applying filters, includes, and excludes to the base queryset of a view. Attributes: VALID_FILTER_OPERATORS: A list of filter operators. ''' def filter_queryset(self, request, queryset, view): '''Filter the queryset. This is the main entry-point to this class, and is called by DRF's list handler. ''' pass def _extract_filters(self, **kwargs): pass def _get_requested_filters(self, **kwargs): ''' Convert 'filters' query params into a dict that can be passed to Q. Returns a dict with two fields, 'include' and 'exclude', which can be used like: result = self._get_requested_filters() q = Q(**result['include'] & ~Q(**result['exclude']) ''' pass def _filters_to_query(self, filters, serializer, q=None): ''' Construct Django Query object from request. Arguments are dictionaries, which will be passed to Q() as kwargs. e.g. includes = { 'foo' : 'bar', 'baz__in' : [1, 2] } produces: Q(foo='bar', baz__in=[1, 2]) Arguments: includes: TreeMap representing inclusion filters. excludes: TreeMap representing exclusion filters. filters: TreeMap with include/exclude filters OR query map serializer: serializer instance of top-level object q: Q() object (optional) Returns: Q() instance or None if no inclusion or exclusion filters were specified. ''' pass def _create_prefetch(self, source, queryset): pass def _build_implicit_prefetches(self, model, prefetches, requirements): '''Build a prefetch dictionary based on internal requirements.''' pass def _make_model_queryset(self, model): pass def _build_implicit_queryset(self, model, requirements): '''Build a queryset based on implicit requirements.''' pass def _build_requested_prefetches( self, prefetches, requirements, model, fields, filters ): '''Build a prefetch dictionary based on request requirements.''' pass def _get_implicit_requirements(self, fields, requirements): '''Extract internal prefetch requirements from serializer fields.''' pass def _get_queryset(self, queryset=None, serializer=None): pass def _serializer_filter(self, serializer=None, queryset=None): pass def _build_queryset( self, serializer=None, filters=None, queryset=None, requirements=None, extra_filters=None, disable_prefetches=False, ): '''Build a queryset that pulls in all data required by this request. Handles nested prefetching of related data and deferring fields at the queryset level. Arguments: serializer: An optional serializer to use a base for the queryset. If no serializer is passed, the `get_serializer` method will be used to initialize the base serializer for the viewset. filters: An optional TreeMap of nested filters. queryset: An optional base queryset. requirements: An optional TreeMap of nested requirements. ''' pass
14
9
30
5
18
7
5
0.4
1
7
3
1
13
3
13
13
449
80
264
78
240
105
181
67
167
18
1
3
67
4,258
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/fields/generic.py
dynamic_rest.fields.generic.DynamicGenericRelationField
class DynamicGenericRelationField(
    WithRelationalFieldMixin, DynamicField
):

    def __init__(self, embed=False, *args, **kwargs):
        if 'requires' in kwargs:
            raise RuntimeError(
                "DynamicGenericRelationField does not support manual"
                " overriding of 'requires'."
            )

        super(DynamicGenericRelationField, self).__init__(*args, **kwargs)
        self.embed = embed

    def bind(self, field_name, parent):
        super(DynamicGenericRelationField, self).bind(field_name, parent)

        source = self.source or field_name

        # Inject `requires` so required fields get prefetched properly.
        # TODO: It seems like we should be able to require the type and
        #       id fields, but that seems to conflict with some internal
        #       Django magic. Disabling `.only()` by requiring '*' seems
        #       to work more reliably...
        self.requires = [
            source + '.*',
            '*'
        ]

        # Get request fields to support sideloading, but disallow field
        # inclusion/exclusion.
        request_fields = self._get_request_fields_from_parent()
        if isinstance(request_fields, dict) and len(request_fields):
            raise ValidationError(
                "%s.%s does not support field inclusion/exclusion" % (
                    self.parent.get_name(),
                    self.field_name
                )
            )
        self.request_fields = request_fields

    def id_only(self):
        # For DynamicRelationFields, id_only() is a serializer
        # responsibility, but for generic relations, we want IDs to be
        # represented differently, and that is a field-level concern,
        # not an object-level concern, so we handle it here.
        return not self.parent.is_field_sideloaded(self.field_name)

    def get_pk_object(self, type_key, id_value):
        return {
            'type': type_key,
            'id': id_value
        }

    def get_serializer_class_for_instance(self, instance):
        return DynamicRouter.get_canonical_serializer(
            resource_key=None,
            instance=instance
        )

    def to_representation(self, instance):
        try:
            # Find serializer for the instance
            serializer_class = self.get_serializer_class_for_instance(instance)
            if not serializer_class:
                # Can't find canonical serializer! For now, just return
                # object name and ID, and hope the client knows what to do
                # with it.
                return self.get_pk_object(
                    instance._meta.object_name,
                    instance.pk
                )

            # We want the pk to be represented as an object with type,
            # rather than just the ID.
            pk_value = self.get_pk_object(
                serializer_class.get_name(),
                instance.pk
            )
            if self.id_only():
                return pk_value

            # Serialize the object. Note that request_fields is set, but
            # field inclusion/exclusion is disallowed via check in bind()
            r = serializer_class(
                dynamic=True,
                request_fields=self.request_fields,
                context=self.context,
                embed=self.embed
            ).to_representation(
                instance
            )

            # Pass pk object that contains type and ID to TaggedDict object
            # so that Processor can use it when the field gets sideloaded.
            if isinstance(r, TaggedDict):
                r.pk_value = pk_value
            return r
        except BaseException:
            # This feature should be considered to be in Beta so don't break
            # if anything unexpected happens.
            # TODO: Remove once we have more confidence.
            traceback.print_exc()
            return None

    def to_internal_value(self, data):
        model_name = data.get('type', None)
        model_id = data.get('id', None)
        if model_name and model_id:
            serializer_class = DynamicRouter.get_canonical_serializer(
                resource_key=None,
                resource_name=model_name
            )
            if serializer_class:
                model = serializer_class.get_model()
                return model.objects.get(id=model_id) if model else None

        return None
class DynamicGenericRelationField( WithRelationalFieldMixin, DynamicField ): def __init__(self, embed=False, *args, **kwargs): pass def bind(self, field_name, parent): pass def id_only(self): pass def get_pk_object(self, type_key, id_value): pass def get_serializer_class_for_instance(self, instance): pass def to_representation(self, instance): pass def to_internal_value(self, data): pass
8
0
15
1
11
3
2
0.3
2
6
2
0
7
4
7
13
119
15
80
24
69
24
44
20
36
5
3
2
16
4,259
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/prefetch.py
dynamic_rest.prefetch.FastQuery
class FastQuery(FastQueryCompatMixin, object):

    def __init__(self, queryset):
        if isinstance(queryset, models.Manager):
            queryset = queryset.all()
        self.queryset = queryset
        self.model = queryset.model
        self.prefetches = {}
        self.fields = None
        self.pk_field = queryset.model._meta.pk.attname
        self._data = None
        self._my_ids = None

    def execute(self):
        if self._data is not None:
            return self._data

        # TODO: check if queryset already has values() called
        # TODO: use self.fields
        qs = self.queryset._clone()

        use_fastquery = getattr(self.model, 'USE_FASTQUERY', True)

        if use_fastquery:
            data = list(qs.values())
            self.merge_prefetch(data)
            self._data = FastList(
                map(lambda obj: FastObject(obj, pk_field=self.pk_field), data)
            )
        else:
            def make_prefetch(fast_prefetch):
                queryset = None
                if fast_prefetch.query is not None:
                    queryset = fast_prefetch.query.queryset
                return Prefetch(
                    fast_prefetch.field,
                    queryset=queryset
                )

            prefetches = [
                make_prefetch(prefetch)
                for prefetch in self.prefetches.values()
            ]
            if len(prefetches) > 0:
                qs = qs.prefetch_related(*prefetches)
            self._data = FastList(
                map(lambda obj: SlowObject(
                    obj, pk_field=self.pk_field
                ), qs.all())
            )

        return self._data

    def __iter__(self):
        """Allow this to be cast to an iterable.

        Note: as with Django QuerySets, calling this will cause the
              query to execute.
        """
        return iter(self.execute())

    def __getitem__(self, k):
        """Support list index and slicing, similar to Django QuerySet."""
        if self._data is not None:
            # Query has already been executed. Extract from local cache.
            return self._data[k]

        # Query hasn't yet been executed. Update queryset.
        if isinstance(k, slice):
            if k.start is not None:
                start = int(k.start)
            else:
                start = None
            if k.stop is not None:
                stop = int(k.stop)
            else:
                stop = None
            if k.step:
                raise TypeError("Stepping not supported")

            self.queryset.query.set_limits(start, stop)
            return self.execute()
        else:
            self.queryset.query.set_limits(k, k + 1)
            return self.execute()

    def __len__(self):
        return len(self.execute())

    def get_ids(self, ids):
        self.queryset = self.queryset.filter(pk__in=ids)
        return self

    def merge_prefetch(self, data):
        model = self.queryset.model

        rel_func_map = {
            'fk': self.merge_fk,
            'o2o': self.merge_o2o,
            'o2or': self.merge_o2or,
            'm2m': self.merge_m2m,
            'm2o': self.merge_m2o,
        }

        for prefetch in self.prefetches.values():
            # TODO: here we assume we're dealing with Prefetch objects;
            #       we could support field notation as well.
            field, rel_type = get_model_field_and_type(
                model, prefetch.field
            )
            if not rel_type:
                # Not a relational field... weird.
                # TODO: maybe raise?
                continue

            func = rel_func_map[rel_type]
            func(data, field, prefetch)

        return data

    def _make_id_map(self, items, pk_field='id'):
        return {
            item[pk_field]: item for item in items
        }

    def _get_my_ids(self, data):
        if self._my_ids is None:
            pk_field = self.queryset.model._meta.pk.attname
            self._my_ids = {o[pk_field] for o in data}

        return self._my_ids

    def merge_fk(self, data, field, prefetch):
        # Strategy: pull out field_id values from each row, pass to
        # prefetch queryset using `pk__in`.
        id_field = field.attname
        ids = set([
            row[id_field] for row in data if id_field in row
        ])
        prefetched_data = prefetch.query.get_ids(ids).execute()
        id_map = self._make_id_map(prefetched_data)

        for row in data:
            row[field.name] = id_map.get(row[id_field], None)

        return data

    def merge_o2o(self, data, field, prefetch):
        # Same as FK.
        return self.merge_fk(data, field, prefetch)

    def merge_o2or(self, data, field, prefetch, m2o_mode=False):
        # Strategy: get my IDs, filter remote model for rows pointing at
        # my IDs.
        # For m2o_mode, account for there being many remote objects;
        # for o2or only one reverse object is supported.
        my_ids = self._get_my_ids(data)

        # If prefetching User.profile, construct filter like:
        #   Profile.objects.filter(user__in=<user_ids>)
        remote_field = reverse_o2o_field_name(field)
        remote_filter_key = '%s__in' % remote_field
        filter_args = {remote_filter_key: my_ids}

        # Fetch remote objects
        remote_objects = prefetch.query.filter(**filter_args).execute()
        id_map = self._make_id_map(data, pk_field=self.pk_field)

        field_name = prefetch.field
        reverse_found = set()  # IDs of local objects that were reversed
        for remote_obj in remote_objects:
            # Pull out ref on remote object pointing at us, and
            # get local object. There *should* always be a matching
            # local object because the remote objects were filtered
            # for those that referenced the local IDs.
            reverse_ref = remote_obj[remote_field]
            local_obj = id_map[reverse_ref]

            if m2o_mode:
                # in many-to-one mode, this is a list
                if field_name not in local_obj:
                    local_obj[field_name] = FastList([])
                local_obj[field_name].append(remote_obj)
            else:
                # in o2or mode, there can only be one
                local_obj[field_name] = remote_obj

            reverse_found.add(reverse_ref)

        # Set value to None for objects that didn't have a matching prefetch
        not_found = my_ids - reverse_found
        for pk in not_found:
            id_map[pk][field_name] = FastList([]) if m2o_mode else None

        return data

    def merge_m2m(self, data, field, prefetch):
        # Strategy: pull out all my IDs, do a reverse filter on remote object.
        # e.g.: If prefetching User.groups, do
        #       Groups.filter(users__in=<user_ids>)
        my_ids = self._get_my_ids(data)

        base_qs = prefetch.query.queryset  # base queryset on remote model
        remote_pk_field = base_qs.model._meta.pk.attname  # get pk field name
        reverse_field = reverse_m2m_field_name(field)

        if reverse_field is None:
            # Note: We can't just reuse self.queryset here because it's
            #       been sliced already.
            filters = {
                field.attname + '__isnull': False
            }
            qs = self.queryset.model.objects.filter(
                pk__in=my_ids, **filters
            )
            joins = list(qs.values_list(
                field.attname,
                self.pk_field
            ))
        else:
            # Get reverse mapping (for User.groups, get Group.users)
            # Note: `qs` already has base filter applied on remote model.
            filters = {
                reverse_field + '__in': my_ids
            }
            joins = list(base_qs.filter(**filters).values_list(
                remote_pk_field,
                reverse_field
            ))

        # Fetch remote objects, as values.
        remote_ids = set([o[0] for o in joins])
        remote_objects = prefetch.query.get_ids(remote_ids).execute()
        id_map = self._make_id_map(remote_objects, pk_field=remote_pk_field)

        # Create mapping of local ID -> remote objects
        to_field = prefetch.field
        object_map = defaultdict(FastList)
        for remote_id, local_id in joins:
            if remote_id in id_map:
                object_map[local_id].append(id_map[remote_id])

        # Merge into working data set.
        for row in data:
            row[to_field] = object_map[row[self.pk_field]]

        return data

    def merge_m2o(self, data, field, prefetch):
        # Same as o2or but allow for many reverse objects.
        return self.merge_o2or(data, field, prefetch, m2o_mode=True)
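The merge strategies above operate on plain row dicts; here is a hedged sketch of the merge_fk idea with invented names and no Django dependency: collect the *_id values, map the prefetched rows by pk, then attach.

# Sketch only; 'group_id' and 'group' are invented names.
rows = [{'id': 1, 'group_id': 9}, {'id': 2, 'group_id': None}]
prefetched = [{'id': 9, 'name': 'staff'}]

id_map = {item['id']: item for item in prefetched}  # cf. _make_id_map
for row in rows:
    row['group'] = id_map.get(row['group_id'])
# rows[0]['group'] -> {'id': 9, 'name': 'staff'}; rows[1]['group'] -> None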
class FastQuery(FastQueryCompatMixin, object): def __init__(self, queryset): pass def execute(self): pass def make_prefetch(fast_prefetch): pass def __iter__(self): '''Allow this to be cast to an iterable. Note: as with Django QuerySets, calling this will cause the query to execute. ''' pass def __getitem__(self, k): '''Support list index and slicing, similar to Django QuerySet.''' pass def __len__(self): pass def get_ids(self, ids): pass def merge_prefetch(self, data): pass def _make_id_map(self, items, pk_field='id'): pass def _get_my_ids(self, data): pass def merge_fk(self, data, field, prefetch): pass def merge_o2o(self, data, field, prefetch): pass def merge_o2or(self, data, field, prefetch, m2o_mode=False): pass def merge_m2m(self, data, field, prefetch): pass def merge_m2o(self, data, field, prefetch): pass
16
2
17
2
12
3
3
0.26
2
9
3
0
14
7
14
28
256
46
169
67
153
44
126
67
110
6
2
3
38
4,260
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/fields/fields.py
dynamic_rest.fields.fields.DynamicRelationField
class DynamicRelationField(WithRelationalFieldMixin, DynamicField):
    """Field proxy for a nested serializer.

    Supports passing in the child serializer as a class or string,
    and resolves to the class after binding to the parent serializer.

    Will proxy certain arguments to the child serializer.

    Attributes:
        SERIALIZER_KWARGS: list of arguments that are passed
            to the child serializer.
    """

    SERIALIZER_KWARGS = set(('many', 'source'))

    def __init__(
        self,
        serializer_class,
        many=False,
        queryset=None,
        embed=False,
        sideloading=None,
        debug=False,
        **kwargs
    ):
        """
        Arguments:
            serializer_class: Serializer class (or string representation)
                to proxy.
            many: Boolean, if relation is to-many.
            queryset: Default queryset to apply when filtering for
                related objects.
            sideloading: if True, force sideloading all the way down.
                if False, force embedding all the way down.
                This overrides the "embed" option if set.
            embed: If True, always embed related object(s). Will not
                sideload, and will include the full object unless
                specifically excluded.
        """
        self._serializer_class = serializer_class
        self.bound = False
        self.queryset = queryset
        self.sideloading = sideloading
        self.debug = debug
        self.embed = embed if sideloading is None else not sideloading
        if '.' in kwargs.get('source', ''):
            raise Exception('Nested relationships are not supported')
        if 'link' in kwargs:
            self.link = kwargs.pop('link')
        super(DynamicRelationField, self).__init__(**kwargs)
        self.kwargs['many'] = self.many = many

    def get_model(self):
        """Get the child serializer's model."""
        return getattr(self.serializer_class.Meta, 'model', None)

    def bind(self, *args, **kwargs):
        """Bind to the parent serializer."""
        if self.bound:  # Prevent double-binding
            return
        super(DynamicRelationField, self).bind(*args, **kwargs)
        self.bound = True
        parent_model = getattr(self.parent.Meta, 'model', None)

        remote = is_field_remote(parent_model, self.source)

        try:
            model_field = get_model_field(parent_model, self.source)
        except BaseException:
            # model field may not be available for m2o fields with no
            # related_name
            model_field = None

        # Infer `required` and `allow_null`
        if 'required' not in self.kwargs and (
            remote or (
                model_field and (
                    model_field.has_default() or model_field.null
                )
            )
        ):
            self.required = False
        if 'allow_null' not in self.kwargs and getattr(
            model_field, 'null', False
        ):
            self.allow_null = True

        self.model_field = model_field

    @resettable_cached_property
    def root_serializer(self):
        """Return the root serializer (serializer for the primary resource)."""
        if not self.parent:
            # Don't cache, so that we'd recompute if parent is set.
            return None

        node = self
        seen = set()
        while True:
            seen.add(node)
            if getattr(node, 'parent', None):
                node = node.parent
                if node in seen:
                    return None
            else:
                return node

    def _get_cached_serializer(self, args, init_args):
        enabled = settings.ENABLE_SERIALIZER_CACHE

        root = self.root_serializer
        if not root or not self.field_name or not enabled:
            # Not enough info to use cache.
            return self.serializer_class(*args, **init_args)

        if not hasattr(root, '_descendant_serializer_cache'):
            # Initialize dict to use as cache on root serializer.
            # Arguably this is a Serializer concern, but we'll do it
            # here so it's agnostic to the exact type of the root
            # serializer (i.e. it could be a DRF serializer).
            root._descendant_serializer_cache = {}

        key_dict = {
            'parent': self.parent.__class__.__name__,
            'field': self.field_name,
            'args': args,
            'init_args': init_args,
        }
        cache_key = hash(pickle.dumps(key_dict))

        if cache_key not in root._descendant_serializer_cache:
            szr = self.serializer_class(
                *args,
                **init_args
            )
            root._descendant_serializer_cache[cache_key] = szr
        else:
            root._descendant_serializer_cache[cache_key].reset()

        return root._descendant_serializer_cache[cache_key]

    def _inherit_parent_kwargs(self, kwargs):
        """Extract any necessary attributes from parent serializer to
        propagate down to child serializer.
        """
        if not self.parent or not self._is_dynamic:
            return kwargs

        if 'request_fields' not in kwargs:
            # If 'request_fields' isn't explicitly set, pull it from the
            # parent serializer.
            request_fields = self._get_request_fields_from_parent()
            if request_fields is None:
                # Default to 'id_only' for nested serializers.
                request_fields = True
            kwargs['request_fields'] = request_fields

        if self.embed and kwargs.get('request_fields') is True:
            # If 'embed' then make sure we fetch the full object.
            kwargs['request_fields'] = {}

        if hasattr(self.parent, 'sideloading'):
            kwargs['sideloading'] = self.parent.sideloading

        if hasattr(self.parent, 'debug'):
            kwargs['debug'] = self.parent.debug

        return kwargs

    def get_serializer(self, *args, **kwargs):
        """Get an instance of the child serializer."""
        init_args = {
            k: v for k, v in six.iteritems(self.kwargs)
            if k in self.SERIALIZER_KWARGS
        }

        kwargs = self._inherit_parent_kwargs(kwargs)
        init_args.update(kwargs)

        if self.embed and self._is_dynamic:
            init_args['embed'] = True

        serializer = self._get_cached_serializer(args, init_args)
        serializer.parent = self
        return serializer

    @resettable_cached_property
    def serializer(self):
        return self.get_serializer()

    @cached_property
    def _is_dynamic(self):
        """Return True if the child serializer is dynamic."""
        return issubclass(
            self.serializer_class,
            DynamicSerializerBase
        )

    def get_attribute(self, instance):
        serializer = self.serializer
        model = serializer.get_model()

        # attempt to optimize by reading the related ID directly
        # from the current instance rather than from the related object
        if not self.kwargs['many'] and serializer.id_only():
            return instance
        elif model is not None:
            try:
                return getattr(instance, self.source)
            except model.DoesNotExist:
                return None
        else:
            return instance

    def to_representation(self, instance):
        """Represent the relationship, either as an ID or object."""
        serializer = self.serializer
        model = serializer.get_model()
        source = self.source

        if not self.kwargs['many'] and serializer.id_only():
            # attempt to optimize by reading the related ID directly
            # from the current instance rather than from the related object
            source_id = '%s_id' % source
            # try the faster way first:
            if hasattr(instance, source_id):
                return getattr(instance, source_id)
            elif model is not None:
                # this is probably a one-to-one field, or a reverse related
                # lookup, so let's look it up the slow way and let the
                # serializer handle the id dereferencing
                try:
                    instance = getattr(instance, source)
                except model.DoesNotExist:
                    instance = None

        # dereference ephemeral objects
        if model is None:
            instance = getattr(instance, source)

        if instance is None:
            return None

        return serializer.to_representation(instance)

    def to_internal_value_single(self, data, serializer):
        """Return the underlying object, given the serialized form."""
        related_model = serializer.Meta.model
        if isinstance(data, related_model):
            return data
        try:
            instance = related_model.objects.get(pk=data)
        except related_model.DoesNotExist:
            raise ValidationError(
                "Invalid value for '%s': %s object with ID=%s not found" %
                (self.field_name, related_model.__name__, data)
            )
        return instance

    def to_internal_value(self, data):
        """Return the underlying object(s), given the serialized form."""
        if self.kwargs['many']:
            serializer = self.serializer.child
            if not isinstance(data, list):
                raise ParseError(
                    "'%s' value must be a list" % self.field_name
                )
            return [
                self.to_internal_value_single(instance, serializer)
                for instance in data
            ]
        return self.to_internal_value_single(data, self.serializer)

    @property
    def serializer_class(self):
        """Get the class of the child serializer.

        Resolves string imports.
        """
        serializer_class = self._serializer_class

        if not isinstance(serializer_class, six.string_types):
            return serializer_class

        parts = serializer_class.split('.')
        module_path = '.'.join(parts[:-1])
        if not module_path:
            if getattr(self, 'parent', None) is None:
                raise Exception(
                    "Can not load serializer '%s'" % serializer_class +
                    ' before binding or without specifying full path'
                )

            # try the module of the parent class
            module_path = self.parent.__module__

        module = importlib.import_module(module_path)
        serializer_class = getattr(module, parts[-1])

        self._serializer_class = serializer_class
        return serializer_class
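A hedged usage sketch of declaring this field on a serializer; the models and serializer names are assumptions for illustration and require a configured Django project.

# Illustrative only; User and Group are assumed models.
from dynamic_rest.fields import DynamicRelationField
from dynamic_rest.serializers import DynamicModelSerializer

class GroupSerializer(DynamicModelSerializer):
    class Meta:
        model = Group  # assumed
        name = 'group'
        fields = ('id', 'name')

class UserSerializer(DynamicModelSerializer):
    # child serializer passed as a string; resolved after binding
    groups = DynamicRelationField('GroupSerializer', many=True, deferred=True)

    class Meta:
        model = User  # assumed
        name = 'user'
        fields = ('id', 'name', 'groups')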
class DynamicRelationField(WithRelationalFieldMixin, DynamicField): '''Field proxy for a nested serializer. Supports passing in the child serializer as a class or string, and resolves to the class after binding to the parent serializer. Will proxy certain arguments to the child serializer. Attributes: SERIALIZER_KWARGS: list of arguments that are passed to the child serializer. ''' def __init__( self, serializer_class, many=False, queryset=None, embed=False, sideloading=None, debug=False, **kwargs ): ''' Arguments: serializer_class: Serializer class (or string representation) to proxy. many: Boolean, if relation is to-many. queryset: Default queryset to apply when filtering for related objects. sideloading: if True, force sideloading all the way down. if False, force embedding all the way down. This overrides the "embed" option if set. embed: If True, always embed related object(s). Will not sideload, and will include the full object unless specifically excluded. ''' pass def get_model(self): '''Get the child serializer's model.''' pass def bind(self, *args, **kwargs): '''Bind to the parent serializer.''' pass @resettable_cached_property def root_serializer(self): '''Return the root serializer (serializer for the primary resource).''' pass def _get_cached_serializer(self, args, init_args): pass def _inherit_parent_kwargs(self, kwargs): '''Extract any necessary attributes from parent serializer to propagate down to child serializer. ''' pass def get_serializer(self, *args, **kwargs): '''Get an instance of the child serializer.''' pass @resettable_cached_property def serializer(self): pass @cached_property def _is_dynamic(self): '''Return True if the child serializer is dynamic.''' pass def get_attribute(self, instance): pass def to_representation(self, instance): '''Represent the relationship, either as an ID or object.''' pass def to_internal_value_single(self, data, serializer): '''Return the underlying object, given the serialized form.''' pass def to_internal_value(self, data): '''Return the underlying object(s), given the serialized form.''' pass @property def serializer_class(self): '''Get the class of the child serializer. Resolves string imports. ''' pass
19
12
19
2
13
4
4
0.3
2
6
1
0
14
11
14
20
300
48
194
66
166
59
144
52
129
7
3
3
51
4,261
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/fields/fields.py
dynamic_rest.fields.fields.DynamicHashIdField
class DynamicHashIdField(GetModelMixin, DynamicField):
    """
    Represents an external ID (computed with hashids).

    Requires the source of the field to be an internal ID, and to
    provide a "model" keyword argument. Together these will produce
    the external ID.

    Based on
    https://github.com/evenicoulddoit/django-rest-framework-serializer-extensions
    implementation of HashIdField.
    """

    default_error_messages = {
        'malformed_hash_id': 'That is not a valid HashId',
    }

    def to_representation(self, value):
        return external_id_from_model_and_internal_id(self.get_model(), value)

    def to_internal_value(self, value):
        model = self.get_model()
        try:
            return internal_id_from_model_and_external_id(model, value)
        except ObjectDoesNotExist:
            self.fail('malformed_hash_id')
class DynamicHashIdField(GetModelMixin, DynamicField): ''' Represents an external ID (computed with hashids). Requires the source of the field to be an internal ID, and to provide a "model" keyword argument. Together these will produce the external ID. Based on https://github.com/evenicoulddoit/django-rest-framework-serializer-extensions implementation of HashIdField. ''' def to_representation(self, value): pass def to_internal_value(self, value): pass
3
1
4
0
4
0
2
0.67
2
0
0
0
2
0
2
9
25
5
12
5
9
8
10
5
7
2
3
1
3
4,262
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/fields/fields.py
dynamic_rest.fields.fields.DynamicField
class DynamicField(CacheableFieldMixin, fields.Field):
    """
    Generic field base to capture additional custom field attributes.
    """

    def __init__(
        self,
        requires=None,
        deferred=None,
        field_type=None,
        immutable=False,
        **kwargs
    ):
        """
        Arguments:
            deferred: Whether or not this field is deferred.
                Deferred fields are not included in the response,
                unless explicitly requested.
            field_type: Field data type, if not inferrable from model.
            requires: List of fields that this field depends on.
                Processed by the view layer during queryset build time.
        """
        self.requires = requires
        self.deferred = deferred
        self.field_type = field_type
        self.immutable = immutable
        self.kwargs = kwargs
        super(DynamicField, self).__init__(**kwargs)

    def to_representation(self, value):
        return value

    def to_internal_value(self, data):
        return data
class DynamicField(CacheableFieldMixin, fields.Field): ''' Generic field base to capture additional custom field attributes. ''' def __init__( self, requires=None, deferred=None, field_type=None, immutable=False, **kwargs ): ''' Arguments: deferred: Whether or not this field is deferred. Deferred fields are not included in the response, unless explicitly requested. field_type: Field data type, if not inferrable from model. requires: List of fields that this field depends on. Processed by the view layer during queryset build time. ''' pass def to_representation(self, value): pass def to_internal_value(self, data): pass
4
2
9
0
6
3
1
0.63
2
1
0
5
3
5
3
5
34
3
19
16
8
12
12
9
8
1
2
0
3
4,263
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/fields/fields.py
dynamic_rest.fields.fields.DynamicComputedField
class DynamicComputedField(DynamicField):
    pass
class DynamicComputedField(DynamicField): pass
1
0
0
0
0
0
0
0
1
0
0
1
0
0
0
5
2
0
2
1
1
0
2
1
1
0
3
0
0
4,264
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/fields/common.py
dynamic_rest.fields.common.WithRelationalFieldMixin
class WithRelationalFieldMixin(object):
    """Mostly code shared by DynamicRelationField and
    DynamicGenericRelationField.
    """

    def _get_request_fields_from_parent(self):
        """Get request fields from the parent serializer."""
        if not self.parent:
            return None

        # default to None so a parent without `request_fields`
        # doesn't raise AttributeError
        if not getattr(self.parent, 'request_fields', None):
            return None

        if not isinstance(self.parent.request_fields, dict):
            return None

        return self.parent.request_fields.get(self.field_name)
class WithRelationalFieldMixin(object): '''Mostly code shared by DynamicRelationField and DynamicGenericRelationField. ''' def _get_request_fields_from_parent(self): '''Get request fields from the parent serializer.''' pass
2
2
12
3
8
1
4
0.44
1
1
0
2
1
0
1
1
17
4
9
2
7
4
9
2
7
4
1
1
4
4,265
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/models.py
tests.models.Horse
class Horse(models.Model):
    name = models.TextField()
    origin = models.TextField()
class Horse(models.Model): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
3
0
3
3
2
0
3
3
2
0
1
0
0
4,266
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/datastructures.py
dynamic_rest.datastructures.TreeMap
class TreeMap(dict):
    """Tree structure implemented with nested dictionaries."""

    def get_paths(self):
        """Get all paths from the root to the leaves.

        For example, given a chain like `{'a':{'b':{'c':None}}}`,
        this method would return `[['a', 'b', 'c']]`.

        Returns:
            A list of lists of paths.
        """
        paths = []
        for key, child in six.iteritems(self):
            if isinstance(child, TreeMap) and child:
                # current child is an intermediate node
                for path in child.get_paths():
                    path.insert(0, key)
                    paths.append(path)
            else:
                # current child is an endpoint
                paths.append([key])
        return paths

    def insert(self, parts, leaf_value, update=False):
        """Add a list of nodes into the tree.

        The list will be converted into a TreeMap (chain) and then
        merged with the current TreeMap.

        For example, this method would insert `['a','b','c']` as
        `{'a':{'b':{'c':{}}}}`.

        Arguments:
            parts: List of nodes representing a chain.
            leaf_value: Value to insert into the leaf of the chain.
            update: Whether or not to update the leaf with the given
                value or to replace the value.

        Returns:
            self
        """
        tree = self
        if not parts:
            return tree

        cur = tree
        last = len(parts) - 1
        for i, part in enumerate(parts):
            if part not in cur:
                cur[part] = TreeMap() if i != last else leaf_value
            elif i == last:  # found leaf
                if update:
                    cur[part].update(leaf_value)
                else:
                    cur[part] = leaf_value
            cur = cur[part]

        return self
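A small usage example that follows directly from the docstrings above, assuming dynamic_rest is importable.

from dynamic_rest.datastructures import TreeMap

tree = TreeMap()
tree.insert(['a', 'b', 'c'], TreeMap())  # {'a': {'b': {'c': {}}}}
tree.insert(['a', 'x'], TreeMap())       # adds a second branch under 'a'
print(tree.get_paths())                  # [['a', 'b', 'c'], ['a', 'x']]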
class TreeMap(dict): '''Tree structure implemented with nested dictionaries.''' def get_paths(self): '''Get all paths from the root to the leaves. For example, given a chain like `{'a':{'b':{'c':None}}}`, this method would return `[['a', 'b', 'c']]`. Returns: A list of lists of paths. ''' pass def insert(self, parts, leaf_value, update=False): '''Add a list of nodes into the tree. The list will be converted into a TreeMap (chain) and then merged with the current TreeMap. For example, this method would insert `['a','b','c']` as `{'a':{'b':{'c':{}}}}`. Arguments: parts: List of nodes representing a chain. leaf_value: Value to insert into the leaf of the chain. update: Whether or not to update the leaf with the given value or to replace the value. Returns: self ''' pass
3
3
28
5
13
11
6
0.85
1
1
0
0
2
0
2
29
60
11
27
10
24
23
24
10
21
7
2
3
11
4,267
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/conf.py
dynamic_rest.conf.Settings
class Settings(object):

    def __init__(self, name, defaults, settings, class_attrs=None):
        self.name = name
        self.defaults = defaults
        self.keys = set(defaults.keys())
        # default to an empty dict so the `in` check in __getattr__
        # is safe when no class_attrs are passed
        self.class_attrs = class_attrs or {}

        self._cache = {}
        self._reload(getattr(settings, self.name, {}))

        setting_changed.connect(self._settings_changed)

    def _reload(self, value):
        """Reload settings after a change."""
        self.settings = value
        self._cache = {}

    def _load_class(self, attr, val):
        if inspect.isclass(val):
            return val
        elif isinstance(val, str):
            parts = val.split('.')
            module_path = '.'.join(parts[:-1])
            class_name = parts[-1]
            mod = __import__(module_path, fromlist=[class_name])
            return getattr(mod, class_name)
        elif val:
            raise Exception("%s must be a string or a class" % attr)

    def __getattr__(self, attr):
        """Get a setting."""
        if attr not in self._cache:
            if attr not in self.keys:
                raise AttributeError("Invalid API setting: '%s'" % attr)

            if attr in self.settings:
                val = self.settings[attr]
            else:
                val = self.defaults[attr]

            if attr in self.class_attrs and val:
                val = self._load_class(attr, val)

            # Cache the result
            self._cache[attr] = val

        return self._cache[attr]

    def _settings_changed(self, *args, **kwargs):
        """Handle changes to core settings."""
        setting, value = kwargs['setting'], kwargs['value']
        if setting == self.name:
            self._reload(value)
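A hedged sketch of how this class is wired to a Django settings entry; the settings key and defaults below are assumptions for illustration.

# Illustrative only; assumes a configured Django settings module.
from django.conf import settings as django_settings

api_settings = Settings(
    'DYNAMIC_REST',                          # settings dict to read overrides from
    {'DEBUG': False, 'ENABLE_LINKS': True},  # assumed defaults
    django_settings,
)
api_settings.DEBUG  # returns the override if set, otherwise the default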
class Settings(object): def __init__(self, name, defaults, settings, class_attrs=None): pass def _reload(self, value): '''Reload settings after a change.''' pass def _load_class(self, attr, val): pass def __getattr__(self, attr): '''Get a setting.''' pass def _settings_changed(self, *args, **kwargs): '''Handle changes to core settings.''' pass
6
3
10
1
8
1
3
0.1
1
4
0
0
5
6
5
5
54
11
39
18
33
4
36
18
30
5
1
2
13
4,268
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/bases.py
dynamic_rest.bases.GetModelMixin
class GetModelMixin(object):
    """
    Mixin to retrieve the model for hashid fields.

    Implementation from
    https://github.com/evenicoulddoit/django-rest-framework-serializer-extensions
    """

    def __init__(self, *args, **kwargs):
        self.model = kwargs.pop('model', None)
        super(GetModelMixin, self).__init__(*args, **kwargs)

    def get_model(self):
        """
        Return the model to generate the HashId for.

        By default, this will equal the model defined within the Meta of
        the ModelSerializer, but can be redefined either during
        initialisation of the Field, or by providing a
        get_<field_name>_model method on the parent serializer.

        The Meta can either explicitly define a model, or provide a
        dot-delimited string path to it.
        """
        if self.model is None:
            custom_fn_name = 'get_{0}_model'.format(self.field_name)

            if hasattr(self.parent, custom_fn_name):
                return getattr(self.parent, custom_fn_name)()
            else:
                try:
                    return self.parent.Meta.model
                except AttributeError:
                    raise AssertionError(
                        'No "model" value passed to field "{0}"'.format(
                            type(self).__name__
                        )
                    )
        elif isinstance(self.model, str):
            return model_from_definition(self.model)
        else:
            return self.model
class GetModelMixin(object): ''' Mixin to retrieve model hashid Implementation from https://github.com/evenicoulddoit/django-rest-framework-serializer-extensions ''' def __init__(self, *args, **kwargs): pass def get_model(self): ''' Return the model to generate the HashId for. By default, this will equal the model defined within the Meta of the ModelSerializer, but can be redefined either during initialisation of the Field, or by providing a get_<field_name>_model method on the parent serializer. The Meta can either explicitly define a model, or provide a dot-delimited string path to it. ''' pass
3
2
17
2
11
5
3
0.64
1
5
0
1
2
1
2
2
42
6
22
5
19
14
15
5
12
5
1
3
6
4,269
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/bases.py
dynamic_rest.bases.CacheableFieldMixin
class CacheableFieldMixin(object):
    """Override Field.root and Field.context to make fields/serializers
    cacheable and reusable.

    The DRF version uses @cached_property, which doesn't have a public
    API for resetting. This version uses normal object variables and
    adds a `reset()` API.
    """

    @resettable_cached_property
    def root(self):
        root = self
        while root.parent is not None:
            root = root.parent
        return root

    @resettable_cached_property
    def context(self):
        return getattr(self.root, '_context', {})
class CacheableFieldMixin(object): '''Overide Field.root and Field.context to make fields/serializers cacheable and reusable. The DRF version uses @cached_property which doesn't have a public API for resetting. This version uses normal object variables with and adds a `reset()` API. ''' @resettable_cached_property def root(self): pass @resettable_cached_property def context(self): pass
5
1
4
0
4
0
2
0.5
1
0
0
3
2
0
2
2
17
2
10
6
5
5
8
4
5
2
1
1
3
4,270
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/apps.py
dynamic_rest.apps.DynamicRestConfig
class DynamicRestConfig(AppConfig):
    name = "dynamic_rest"
    verbose_name = "Django Dynamic Rest"

    def ready(self):
        if (hasattr(settings, "ENABLE_HASHID_FIELDS")
                and settings.ENABLE_HASHID_FIELDS):
            if (not hasattr(settings, "HASHIDS_SALT")
                    or settings.HASHIDS_SALT is None):
                raise ImproperlyConfigured(
                    "ENABLE_HASHID_FIELDS is True in your settings, "
                    "but no HASHIDS_SALT string was set!"
                )
class DynamicRestConfig(AppConfig): def ready(self): pass
2
0
9
1
8
0
3
0
1
0
0
0
1
0
1
1
13
2
11
4
9
0
7
4
5
3
1
2
3
4,271
AltSchool/dynamic-rest
AltSchool_dynamic-rest/benchmarks/test_bench.py
benchmarks.test_bench.BenchmarkTest
class BenchmarkTest(APITestCase):

    @classmethod
    def setUpClass(cls):
        # initialize results: a 4x nested dictionary
        cls._results = defaultdict(
            lambda: defaultdict(
                lambda: defaultdict(dict)
            )
        )

    @classmethod
    def tearDownClass(cls):
        # save results to an HTML file
        with open('benchmarks.html', 'w') as file:
            file.write(CHART_HEAD)
            for benchmark_name, implementations in sorted(
                cls._results.items()
            ):
                data = []
                for implementation_name, implementation_data in sorted(
                    implementations.items()
                ):
                    for key in implementation_data.keys():
                        values = sorted(implementation_data[key].values())
                        implementation_data[key] = get_average(values)
                    implementation_data = sorted(implementation_data.items())
                    data.append({
                        'name': implementation_name,
                        'data': implementation_data
                    })

                file.write(
                    CHART_TEMPLATE.format(
                        benchmark_name=benchmark_name,
                        data=json.dumps(data)
                    )
                )

    def bench(
        self,
        implementation_name,
        benchmark_name,
        url,
        size,
        sample
    ):
        start = datetime.now()
        response = self.client.get(url)
        end = datetime.now()
        self.assertEqual(response.status_code, 200)
        diff = end - start
        d = diff.total_seconds()
        self._results[benchmark_name][implementation_name][size][sample] = d

    def generate_linear(self, size):
        total = 0
        for i in xrange(size):
            total += 1
            User.objects.create(
                name=str(i)
            )
        return total

    def generate_quadratic(self, size):
        total = 0
        for i in xrange(size):
            total += 1
            user = User.objects.create(
                name=str(i)
            )
            for j in xrange(size):
                total += 1
                group = Group.objects.create(
                    name='%d-%d' % (i, j),
                    max_size=size
                )
                user.groups.add(group)
        return total

    def generate_cubic(self, size):
        total = 0
        for i in xrange(size):
            total += 1
            user = User.objects.create(
                name=str(i)
            )
            for j in xrange(size):
                total += 1
                group = Group.objects.create(
                    name='%d-%d' % (i, j),
                    max_size=size
                )
                user.groups.add(group)
                for k in xrange(size):
                    total += 1
                    permission = Permission.objects.create(
                        name='%d-%d-%d' % (i, j, k)
                    )
                    group.permissions.add(permission)
        return total
class BenchmarkTest(APITestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass def bench( self, implementation_name, benchmark_name, url, size, sample ): pass def generate_linear(self, size): pass def generate_quadratic(self, size): pass def generate_cubic(self, size): pass
9
0
16
1
15
0
3
0.02
1
6
3
0
4
0
6
6
105
9
94
41
78
2
53
31
46
4
1
4
15
4,272
AltSchool/dynamic-rest
AltSchool_dynamic-rest/benchmarks/models.py
benchmarks.models.User
class User(models.Model):
    name = models.TextField()
    groups = models.ManyToManyField('Group', related_name='users')
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
class User(models.Model): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
5
0
5
5
4
0
5
5
4
0
1
0
0
4,273
AltSchool/dynamic-rest
AltSchool_dynamic-rest/benchmarks/drf.py
benchmarks.drf.UserWithGroupsViewSet
class UserWithGroupsViewSet(viewsets.ModelViewSet):
    queryset = User.objects.all()
    serializer_class = UserWithGroupsSerializer
class UserWithGroupsViewSet(viewsets.ModelViewSet): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
3
0
3
3
2
0
3
3
2
0
1
0
0
4,274
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/fields/fields.py
dynamic_rest.fields.fields.DynamicMethodField
class DynamicMethodField(SerializerMethodField, DynamicField):
    def reset(self):
        super(DynamicMethodField, self).reset()
        if self.method_name == 'get_' + self.field_name:
            self.method_name = None
class DynamicMethodField(SerializerMethodField, DynamicField): def reset(self): pass
2
0
4
0
4
0
2
0
2
1
0
0
1
1
1
6
5
0
5
3
3
0
5
3
3
2
3
1
2
4,275
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/models.py
tests.models.B
class B(models.Model):
    a = models.OneToOneField('A', related_name='b', on_delete=models.CASCADE)
class B(models.Model): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
2
0
2
2
1
0
2
2
1
0
1
0
0
4,276
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/prefetch.py
dynamic_rest.prefetch.FastQueryCompatMixin
class FastQueryCompatMixin(object):
    """Mixin for FastQuery providing QuerySet-compatibility APIs.

    The methods basically just modify the underlying QuerySet object.
    Separated into a mixin so it's clearer which APIs are supported.
    """

    def prefetch_related(self, *args):
        try:
            for arg in args:
                if isinstance(arg, str):
                    arg = FastPrefetch.make_from_field(
                        model=self.model,
                        field_name=arg
                    )
                elif isinstance(arg, Prefetch):
                    arg = FastPrefetch.make_from_prefetch(arg, self.model)
                if not isinstance(arg, FastPrefetch):
                    raise Exception("Must be FastPrefetch object")

                if arg.field in self.prefetches:
                    # include the field name in the error message
                    raise Exception(
                        "Prefetch for field '%s' already exists." % arg.field
                    )
                self.prefetches[arg.field] = arg
        except Exception as e:  # noqa
            traceback.print_exc()

        return self

    def only(self, *fields):
        # TODO: support this for realz
        '''
        self.fields = set(self.fields) + set(fields)
        '''
        return self

    def exclude(self, *args, **kwargs):
        self.queryset = self.queryset.exclude(*args, **kwargs)
        return self

    def count(self):
        qs = self.queryset._clone()
        return qs.count()

    def extra(self, *args, **kwargs):
        self.queryset = self.queryset.extra(*args, **kwargs)
        return self

    def filter(self, *args, **kwargs):
        self.queryset = self.queryset.filter(*args, **kwargs)
        return self

    def order_by(self, *ordering):
        self.queryset = self.queryset.order_by(*ordering)
        return self

    def distinct(self, *args, **kwargs):
        self.queryset = self.queryset.distinct(*args, **kwargs)
        return self

    def get(self, *args, **kwargs):
        # Returns ORM object
        queryset = self._get_django_queryset()
        return queryset.get(*args, **kwargs)

    def first(self, *args, **kwargs):
        # Returns ORM object
        queryset = self._get_django_queryset()
        return queryset.first()

    @property
    def query(self):
        return self.queryset.query

    def _clone(self):
        new = copy.copy(self)
        new.queryset = new.queryset._clone()
        return new

    def _get_django_queryset(self):
        """Return Django QuerySet with prefetches properly configured."""
        prefetches = []
        for field, fprefetch in self.prefetches.items():
            has_query = hasattr(fprefetch, 'query')
            qs = fprefetch.query.queryset if has_query else None
            prefetches.append(
                Prefetch(field, queryset=qs)
            )

        queryset = self.queryset
        if prefetches:
            queryset = queryset.prefetch_related(*prefetches)

        return queryset

    def annotate(self, *args, **kwargs):
        self.queryset = self.queryset.annotate(*args, **kwargs)
        return self
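A hedged sketch of the QuerySet-compatible chain these methods enable, assuming a Django model named User; each call mutates the wrapped queryset and returns self.

# Illustrative only; User is an assumed model.
fq = FastQuery(User.objects.all())
fq = fq.filter(is_dead=False).order_by('name').prefetch_related('groups')
page = fq[0:10]  # slicing sets query limits before execution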
class FastQueryCompatMixin(object): ''' Mixins for FastQuery to provide QuerySet-compatibility APIs. They basically just modify the underlying QuerySet object. Separated in a mixin so it's clearer which APIs are supported. ''' def prefetch_related(self, *args): pass def only(self, *fields): ''' self.fields = set(self.fields) + set(fields) ''' pass def exclude(self, *args, **kwargs): pass def count(self): pass def extra(self, *args, **kwargs): pass def filter(self, *args, **kwargs): pass def order_by(self, *ordering): pass def distinct(self, *args, **kwargs): pass def get(self, *args, **kwargs): pass def first(self, *args, **kwargs): pass @property def query(self): pass def _clone(self): pass def _get_django_queryset(self): '''Return Django QuerySet with prefetches properly configured.''' pass def annotate(self, *args, **kwargs): pass
16
3
6
0
5
1
2
0.17
1
3
1
1
14
2
14
14
99
19
69
29
53
12
60
26
45
7
1
3
23
4,277
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/processors.py
dynamic_rest.processors.SideloadingProcessor
class SideloadingProcessor(object):
    """A processor that sideloads serializer data.

    Sideloaded records are returned under top-level response keys,
    which produces responses that are typically smaller than their
    nested equivalents.
    """

    def __init__(self, serializer, data):
        """Initializes and runs the processor.

        Arguments:
            serializer: a DREST serializer
            data: the serializer's representation
        """
        if isinstance(serializer, ListSerializer):
            serializer = serializer.child
        self.data = {}
        self.seen = defaultdict(set)
        self.plural_name = serializer.get_plural_name()
        self.name = serializer.get_name()

        # process the data, optionally sideloading
        self.process(data)

        # add the primary resource data into the response data
        resource_name = self.name if isinstance(
            data, dict
        ) else self.plural_name
        self.data[resource_name] = data

    def is_dynamic(self, data):
        """Check whether the given data dictionary is a DREST structure.

        Arguments:
            data: A dictionary representation of a DRF serializer.
        """
        return isinstance(data, TaggedDict)

    def process(self, obj, parent=None, parent_key=None, depth=0):
        """Recursively process the data for sideloading.

        Converts the nested representation into a sideloaded
        representation.
        """
        if isinstance(obj, list):
            for key, o in enumerate(obj):
                # traverse into lists of objects
                self.process(o, parent=obj, parent_key=key, depth=depth)
        elif isinstance(obj, dict):
            dynamic = self.is_dynamic(obj)
            returned = isinstance(obj, ReturnDict)
            if dynamic or returned:
                # recursively check all fields
                for key, o in six.iteritems(obj):
                    if isinstance(o, list) or isinstance(o, dict):
                        # lists or dicts indicate a relation
                        self.process(
                            o,
                            parent=obj,
                            parent_key=key,
                            depth=depth + 1
                        )

                if not dynamic or getattr(obj, 'embed', False):
                    return

                serializer = obj.serializer
                name = serializer.get_plural_name()
                instance = getattr(obj, 'instance', serializer.instance)
                instance_pk = instance.pk if instance else None
                pk = getattr(obj, 'pk_value', instance_pk) or instance_pk

                # For polymorphic relations, `pk` can be a dict, so use the
                # string representation (dict isn't hashable).
                pk_key = repr(pk)

                # sideloading
                seen = True
                # if this object has not yet been seen
                if pk_key not in self.seen[name]:
                    seen = False
                    self.seen[name].add(pk_key)

                # prevent sideloading the primary objects
                if depth == 0:
                    return

                # TODO: spec out the exact behavior for secondary
                # instances of the primary resource

                # if the primary resource is embedded, add it to a
                # prefixed key
                if name == self.plural_name:
                    name = '%s%s' % (
                        settings.ADDITIONAL_PRIMARY_RESOURCE_PREFIX,
                        name
                    )

                if not seen:
                    # allocate a top-level key in the data for this
                    # resource type
                    if name not in self.data:
                        self.data[name] = []

                    # move the object into a new top-level bucket
                    # and mark it as seen
                    self.data[name].append(obj)
                else:
                    # obj sideloaded, but maybe with other fields
                    for o in self.data.get(name, []):
                        if o.instance.pk == pk:
                            o.update(obj)
                            break

                # replace the object with a reference
                if parent is not None and parent_key is not None:
                    parent[parent_key] = pk
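A hedged before/after illustration of the transformation process() performs, with invented field names: the nested relation is moved under a top-level plural key and replaced by its pk in the parent.

# Nested representation (input shape):
nested = {'id': 1, 'name': 'ann', 'group': {'id': 9, 'name': 'staff'}}

# Sideloaded representation (output shape):
sideloaded = {
    'user': {'id': 1, 'name': 'ann', 'group': 9},  # relation replaced by pk
    'groups': [{'id': 9, 'name': 'staff'}],        # record moved to top level
}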
class SideloadingProcessor(object): '''A processor that sideloads serializer data. Sideloaded records are returned under top-level response keys and produces responses that are typically smaller than their nested equivalent. ''' def __init__(self, serializer, data): '''Initializes and runs the processor. Arguments: serializer: a DREST serializer data: the serializer's representation ''' pass def is_dynamic(self, data): '''Check whether the given data dictionary is a DREST structure. Arguments: data: A dictionary representation of a DRF serializer. ''' pass def process(self, obj, parent=None, parent_key=None, depth=0): '''Recursively process the data for sideloading. Converts the nested representation into a sideloaded representation. ''' pass
4
4
36
5
21
10
7
0.57
1
5
1
0
3
4
3
3
119
20
63
19
59
36
49
19
45
17
1
5
21
4,278
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/migrations/0006_auto_20210921_1026.py
tests.migrations.0006_auto_20210921_1026.Migration
class Migration(migrations.Migration): dependencies = [ ('tests', '0005_auto_20170712_0759'), ] operations = [ migrations.AlterField( model_name='cat', name='hunting_grounds', field=models.ManyToManyField( related_name='annoying_cats', related_query_name='getoffmylawn', to='tests.Location'), ), migrations.AlterField( model_name='event', name='status', field=models.TextField( default='current'), ), migrations.AlterField( model_name='user', name='is_dead', field=models.BooleanField( default=False, null=True), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
29
2
27
3
26
0
3
3
2
0
1
0
0
4,279
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/migrations/0005_auto_20170712_0759.py
tests.migrations.0005_auto_20170712_0759.Migration
class Migration(migrations.Migration): dependencies = [ ('tests', '0004_user_is_dead'), ] operations = [ migrations.CreateModel( name='Car', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), # noqa ('name', models.CharField(max_length=60)), ], ), migrations.CreateModel( name='Country', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), # noqa ('name', models.CharField(max_length=60)), ('short_name', models.CharField(max_length=30)), ], ), migrations.CreateModel( name='Part', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), # noqa ('name', models.CharField(max_length=60)), ('car', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.Car')), # noqa ('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.Country')), # noqa ], ), migrations.AddField( model_name='car', name='country', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.Country'), # noqa ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0.17
1
0
0
0
0
0
0
0
37
2
35
3
34
6
3
3
2
0
1
0
0
4,280
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/migrations/0004_user_is_dead.py
tests.migrations.0004_user_is_dead.Migration
class Migration(migrations.Migration): dependencies = [ ('tests', '0003_auto_20160401_1656'), ] operations = [ migrations.AddField( model_name='user', name='is_dead', field=models.NullBooleanField(default=False), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
13
2
11
3
10
0
3
3
2
0
1
0
0
4,281
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/migrations/0003_auto_20160401_1656.py
tests.migrations.0003_auto_20160401_1656.Migration
class Migration(migrations.Migration): dependencies = [ ('contenttypes', '0001_initial'), ('tests', '0002_auto_20160310_1052'), ] operations = [ migrations.AddField( model_name='user', name='favorite_pet_id', field=models.TextField(null=True, blank=True), preserve_default=True, ), migrations.AddField( model_name='user', name='favorite_pet_type', field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType', null=True), # noqa preserve_default=True, ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0.05
1
0
0
0
0
0
0
0
21
2
19
3
18
1
3
3
2
0
1
0
0
4,282
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/migrations/0002_auto_20160310_1052.py
tests.migrations.0002_auto_20160310_1052.Migration
class Migration(migrations.Migration): dependencies = [ ('tests', '0001_initial'), ] operations = [ migrations.AddField( model_name='user', name='date_of_birth', field=models.DateField(blank=True, null=True), ), migrations.AlterField( model_name='group', name='name', field=models.TextField(unique=True), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
18
2
16
3
15
0
3
3
2
0
1
0
0
4,283
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/migrations/0001_initial.py
tests.migrations.0001_initial.Migration
class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='A', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField(blank=True)), ], ), migrations.CreateModel( name='B', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('a', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='b', to='tests.A')), ], ), migrations.CreateModel( name='C', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('b', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cs', to='tests.B')), ], ), migrations.CreateModel( name='Cat', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ], ), migrations.CreateModel( name='D', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField(blank=True)), ], ), migrations.CreateModel( name='Dog', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ('fur_color', models.TextField()), ('origin', models.TextField()), ], ), migrations.CreateModel( name='Event', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ('status', models.TextField(default=b'current')), ], ), migrations.CreateModel( name='Group', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ], ), migrations.CreateModel( name='Horse', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ('origin', models.TextField()), ], ), migrations.CreateModel( name='Location', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ('blob', models.TextField()), ], ), migrations.CreateModel( name='Permission', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ('code', models.IntegerField()), ], ), migrations.CreateModel( name='Profile', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('display_name', models.TextField()), ('thumbnail_url', models.TextField(blank=True, null=True)), ], ), migrations.CreateModel( name='User', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ('last_name', models.TextField()), ('groups', models.ManyToManyField(related_name='users', to='tests.Group')), ('location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tests.Location')), ('permissions', models.ManyToManyField(related_name='users', to='tests.Permission')), ], ), migrations.CreateModel( name='Zebra', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.TextField()), ('origin', models.TextField()), ], ), migrations.AddField( model_name='profile', name='user', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, 
to='tests.User'), ), migrations.AddField( model_name='group', name='permissions', field=models.ManyToManyField(related_name='groups', to='tests.Permission'), ), migrations.AddField( model_name='event', name='location', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tests.Location'), ), migrations.AddField( model_name='event', name='users', field=models.ManyToManyField(to='tests.User'), ), migrations.AddField( model_name='cat', name='backup_home', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='friendly_cats', to='tests.Location'), ), migrations.AddField( model_name='cat', name='home', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.Location'), ), migrations.AddField( model_name='cat', name='hunting_grounds', field=models.ManyToManyField(related_name='annoying_cats', related_query_name=b'getoffmylawn', to='tests.Location'), ), migrations.AddField( model_name='cat', name='parent', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='kittens', to='tests.Cat'), ), migrations.AddField( model_name='c', name='d', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.D'), ), ]
class Migration(migrations.Migration): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
164
3
161
4
160
0
4
4
3
0
1
0
0
4,284
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/management/commands/initialize_fixture.py
tests.management.commands.initialize_fixture.Command
class Command(BaseCommand): help = 'Loads fixture data' def handle(self, *args, **options): create_fixture() self.stdout.write("Loaded fixtures.")
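The command can also be invoked programmatically, e.g. from a test or a shell session, using Django's standard management API:

from django.core.management import call_command

# equivalent to `python manage.py initialize_fixture` on the command line
call_command('initialize_fixture')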
class Command(BaseCommand): def handle(self, *args, **options): pass
2
0
4
1
3
0
1
0
1
0
0
0
1
0
1
1
7
2
5
3
3
0
5
3
3
1
1
0
1
4,285
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/test_prefetch.py
tests.test_prefetch.TestPrefetch
class TestPrefetch(TestCase): """Tests prefetch corner-case bugs introduced in Django 1.7 See dynamic_rest.patches for details. """ def test_nested_prefetch(self): a = A.objects.create(name="a") b = B.objects.create(a=a) d = D.objects.create(name="d") C.objects.create(b=b, d=d) # This fails A.objects.prefetch_related( Prefetch( 'b', queryset=B.objects.prefetch_related( Prefetch( 'cs', queryset=C.objects.prefetch_related( Prefetch( 'd', queryset=D.objects.all() ) ) ) ) ) )[0]
class TestPrefetch(TestCase): '''Tests prefetch corner-case bugs introduced in Django 1.7 See dynamic_rest.patches for details. ''' def test_nested_prefetch(self): pass
2
1
23
1
21
1
1
0.18
1
4
4
0
1
0
1
1
29
3
22
5
20
4
7
5
5
1
1
0
1
4,286
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/viewsets.py
dynamic_rest.viewsets.QueryParams
class QueryParams(QueryDict): """ Extension of Django's QueryDict. Instantiated from a DRF Request object, and returns a mutable QueryDict subclass. Also adds methods that might be useful for our usecase. """ def __init__(self, query_params, *args, **kwargs): if hasattr(query_params, 'urlencode'): query_string = query_params.urlencode() else: assert isinstance( query_params, (six.string_types, six.binary_type) ) query_string = query_params kwargs['mutable'] = True super(QueryParams, self).__init__(query_string, *args, **kwargs) def add(self, key, value): """ Method to accept a list of values and append to flat list. QueryDict.appendlist(), if given a list, will append the list, which creates nested lists. In most cases, we want to be able to pass in a list (for convenience) but have it appended into a flattened list. TODO: Possibly throw an error if add() is used on a non-list param. """ if isinstance(value, list): for val in value: self.appendlist(key, val) else: self.appendlist(key, value)
class QueryParams(QueryDict):
    '''
    Extension of Django's QueryDict. Instantiated from a DRF Request object,
    and returns a mutable QueryDict subclass. Also adds methods that might
    be useful for our use case.
    '''
    def __init__(self, query_params, *args, **kwargs):
        pass
    def add(self, key, value):
        '''
        Method to accept a list of values and append to flat list.
        QueryDict.appendlist(), if given a list, will append the list,
        which creates nested lists. In most cases, we want to be able
        to pass in a list (for convenience) but have it appended into a
        flattened list.

        TODO: Possibly throw an error if add() is used on a non-list param.
        '''
        pass
3
2
13
0
9
4
3
0.72
1
2
0
0
2
0
2
2
33
2
18
5
15
13
13
5
10
3
1
2
5
4,287
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/viewsets.py
dynamic_rest.viewsets.DynamicModelViewSet
class DynamicModelViewSet(WithDynamicViewSetMixin, viewsets.ModelViewSet): ENABLE_BULK_PARTIAL_CREATION = settings.ENABLE_BULK_PARTIAL_CREATION ENABLE_BULK_UPDATE = settings.ENABLE_BULK_UPDATE ENABLE_PATCH_ALL = settings.ENABLE_PATCH_ALL def _get_bulk_payload(self, request): plural_name = self.get_serializer_class().get_plural_name() if isinstance(request.data, list): return request.data elif plural_name in request.data and len(request.data) == 1: return request.data[plural_name] return None def _bulk_update(self, data, partial=False): # Restrict the update to the filtered queryset. serializer = self.get_serializer( self.filter_queryset(self.get_queryset()), data=data, many=True, partial=partial ) serializer.is_valid(raise_exception=True) self.perform_update(serializer) return Response(serializer.data, status=status.HTTP_200_OK) def _validate_patch_all(self, data): if not isinstance(data, dict): raise ValidationError( 'Patch-all data must be in object form' ) serializer = self.get_serializer() fields = serializer.get_all_fields() validated = {} for name, value in six.iteritems(data): field = fields.get(name, None) if field is None: raise ValidationError( 'Unknown field: "%s"' % name ) source = field.source or name if source == '*' or field.read_only: raise ValidationError( 'Cannot update field: "%s"' % name ) validated[source] = value return validated def _patch_all_query(self, queryset, data): # update by queryset try: return queryset.update(**data) except Exception as e: raise ValidationError( 'Failed to bulk-update records:\n' '%s\n' 'Data: %s' % ( str(e), str(data) ) ) def _patch_all_loop(self, queryset, data): # update by transaction loop updated = 0 try: with transaction.atomic(): for record in queryset: for k, v in six.iteritems(data): setattr(record, k, v) record.save() updated += 1 return updated except IntegrityError as e: raise ValidationError( 'Failed to update records:\n' '%s\n' 'Data: %s' % ( str(e), str(data) ) ) def _patch_all(self, data, query=False): queryset = self.filter_queryset(self.get_queryset()) data = self._validate_patch_all(data) updated = ( self._patch_all_query(queryset, data) if query else self._patch_all_loop(queryset, data) ) return Response({ 'meta': { 'updated': updated } }, status=status.HTTP_200_OK) def update(self, request, *args, **kwargs): """Update one or more model instances. If ENABLE_BULK_UPDATE is set, multiple previously-fetched records may be updated in a single call, provided their IDs. If ENABLE_PATCH_ALL is set, multiple records may be updated in a single PATCH call, even without knowing their IDs. *WARNING*: ENABLE_PATCH_ALL should be considered an advanced feature and used with caution. This feature must be enabled at the viewset level and must also be requested explicitly by the client via the "patch-all" query parameter. This parameter can have one of the following values: true (or 1): records will be fetched and then updated in a transaction loop - The `Model.save` method will be called and model signals will run - This can be slow if there are too many signals or many records in the query - This is considered the more safe and default behavior query: records will be updated in a single query - The `QuerySet.update` method will be called and model signals will not run - This will be fast, but may break data constraints that are controlled by signals - This is considered unsafe but useful in certain situations The server's successful response to a patch-all request will NOT include any individual records. 
Instead, the response content will contain a "meta" object with an "updated" count of updated records. Examples: Update one dog: PATCH /dogs/1/ { 'fur': 'white' } Update many dogs by ID: PATCH /dogs/ [ {'id': 1, 'fur': 'white'}, {'id': 2, 'fur': 'black'}, {'id': 3, 'fur': 'yellow'} ] Update all dogs in a query: PATCH /dogs/?filter{fur.contains}=brown&patch-all=true { 'fur': 'gold' } """ # noqa if self.ENABLE_BULK_UPDATE: patch_all = self.get_request_patch_all() if self.ENABLE_PATCH_ALL and patch_all: # patch-all update data = request.data return self._patch_all( data, query=(patch_all == 'query') ) else: # bulk payload update partial = 'partial' in kwargs bulk_payload = self._get_bulk_payload(request) if bulk_payload: return self._bulk_update(bulk_payload, partial) # singular update try: return super(DynamicModelViewSet, self).update(request, *args, **kwargs) except AssertionError as e: err = str(e) if 'Fix your URL conf' in err: # this error is returned by DRF if a client # makes an update request (PUT or PATCH) without an ID # since DREST supports bulk updates with IDs contained in data, # we return a 400 instead of a 500 for this case, # as this is not considered a misconfiguration raise exceptions.ValidationError(err) else: raise def _create_many(self, data): items = [] errors = [] result = {} serializers = [] for entry in data: serializer = self.get_serializer(data=entry) try: serializer.is_valid(raise_exception=True) except exceptions.ValidationError as e: errors.append({ 'detail': str(e), 'source': entry }) else: if self.ENABLE_BULK_PARTIAL_CREATION: self.perform_create(serializer) items.append( serializer.to_representation(serializer.instance)) else: serializers.append(serializer) if not self.ENABLE_BULK_PARTIAL_CREATION and not errors: for serializer in serializers: self.perform_create(serializer) items.append( serializer.to_representation(serializer.instance)) # Populate serialized data to the result. result = SideloadingProcessor( self.get_serializer(), items ).data # Include errors if any. if errors: result['errors'] = errors code = (status.HTTP_201_CREATED if not errors else status.HTTP_400_BAD_REQUEST) return Response(result, status=code) def create(self, request, *args, **kwargs): """ Either create a single or many model instances in bulk using the Serializer's many=True ability from Django REST >= 2.2.5. The data can be represented by the serializer name (single or plural forms), dict or list. 
Examples: POST /dogs/ { "name": "Fido", "age": 2 } POST /dogs/ { "dog": { "name": "Lucky", "age": 3 } } POST /dogs/ { "dogs": [ {"name": "Fido", "age": 2}, {"name": "Lucky", "age": 3} ] } POST /dogs/ [ {"name": "Fido", "age": 2}, {"name": "Lucky", "age": 3} ] """ bulk_payload = self._get_bulk_payload(request) if bulk_payload: return self._create_many(bulk_payload) return super(DynamicModelViewSet, self).create( request, *args, **kwargs) def _destroy_many(self, data): instances = self.get_queryset().filter( id__in=[d['id'] for d in data] ).distinct() for instance in instances: self.check_object_permissions(self.request, instance) self.perform_destroy(instance) return Response(status=status.HTTP_204_NO_CONTENT) def destroy(self, request, *args, **kwargs): """ Either delete a single or many model instances in bulk DELETE /dogs/ { "dogs": [ {"id": 1}, {"id": 2} ] } DELETE /dogs/ [ {"id": 1}, {"id": 2} ] """ bulk_payload = self._get_bulk_payload(request) if bulk_payload: return self._destroy_many(bulk_payload) lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field if lookup_url_kwarg not in kwargs: # assume that it is a poorly formatted bulk request return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED) return super(DynamicModelViewSet, self).destroy( request, *args, **kwargs )
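A client-side sketch of a patch-all request against this viewset; the endpoint, field names, and the `requests` dependency are assumptions for illustration:

import requests

# update every brown dog in one call; signals run because patch-all=true
# selects the transaction-loop strategy rather than patch-all=query
response = requests.patch(
    'http://localhost:8000/dogs/?filter{fur.contains}=brown&patch-all=true',
    json={'fur': 'gold'},
)
print(response.json())  # e.g. {'meta': {'updated': 3}}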
class DynamicModelViewSet(WithDynamicViewSetMixin, viewsets.ModelViewSet):
    def _get_bulk_payload(self, request):
        pass
    def _bulk_update(self, data, partial=False):
        pass
    def _validate_patch_all(self, data):
        pass
    def _patch_all_query(self, queryset, data):
        pass
    def _patch_all_loop(self, queryset, data):
        pass
    def _patch_all(self, data, query=False):
        pass
    def update(self, request, *args, **kwargs):
        '''Update one or more model instances.

        If ENABLE_BULK_UPDATE is set, multiple previously-fetched records
        may be updated in a single call, provided their IDs.

        If ENABLE_PATCH_ALL is set, multiple records may be updated in a
        single PATCH call, even without knowing their IDs.

        *WARNING*: ENABLE_PATCH_ALL should be considered an advanced feature
        and used with caution. This feature must be enabled at the viewset
        level and must also be requested explicitly by the client
        via the "patch-all" query parameter.

        This parameter can have one of the following values:

            true (or 1): records will be fetched and then updated in a
            transaction loop
              - The `Model.save` method will be called and model signals
                will run
              - This can be slow if there are too many signals or many
                records in the query
              - This is considered the more safe and default behavior
            query: records will be updated in a single query
              - The `QuerySet.update` method will be called and model
                signals will not run
              - This will be fast, but may break data constraints that are
                controlled by signals
              - This is considered unsafe but useful in certain situations

        The server's successful response to a patch-all request
        will NOT include any individual records.

        Instead, the response content will contain a "meta" object with
        an "updated" count of updated records.

        Examples:

        Update one dog:

            PATCH /dogs/1/
            {
                'fur': 'white'
            }

        Update many dogs by ID:

            PATCH /dogs/
            [
                {'id': 1, 'fur': 'white'},
                {'id': 2, 'fur': 'black'},
                {'id': 3, 'fur': 'yellow'}
            ]

        Update all dogs in a query:

            PATCH /dogs/?filter{fur.contains}=brown&patch-all=true
            {
                'fur': 'gold'
            }
        '''
        pass
    def _create_many(self, data):
        pass
    def create(self, request, *args, **kwargs):
        '''
        Either create a single or many model instances in bulk
        using the Serializer's many=True ability from Django REST >= 2.2.5.

        The data can be represented by the serializer name (single or
        plural forms), dict or list.

        Examples:

        POST /dogs/
        {
            "name": "Fido",
            "age": 2
        }

        POST /dogs/
        {
            "dog": {
                "name": "Lucky",
                "age": 3
            }
        }

        POST /dogs/
        {
            "dogs": [
                {"name": "Fido", "age": 2},
                {"name": "Lucky", "age": 3}
            ]
        }

        POST /dogs/
        [
            {"name": "Fido", "age": 2},
            {"name": "Lucky", "age": 3}
        ]
        '''
        pass
    def _destroy_many(self, data):
        pass
    def destroy(self, request, *args, **kwargs):
        '''
        Either delete a single or many model instances in bulk

        DELETE /dogs/
        {
            "dogs": [
                {"id": 1},
                {"id": 2}
            ]
        }

        DELETE /dogs/
        [
            {"id": 1},
            {"id": 2}
        ]
        '''
        pass
12
3
26
2
15
9
3
0.6
2
7
1
14
11
0
11
27
306
39
167
49
155
100
113
45
101
8
2
4
38
4,288
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/tagged.py
dynamic_rest.tagged._TaggedPlainDict
class _TaggedPlainDict(TaggedDict, dict): pass
class _TaggedPlainDict(TaggedDict, dict): pass
1
0
0
0
0
0
0
0
2
0
0
0
0
0
0
31
2
0
2
1
1
0
2
1
1
0
2
0
0
4,289
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/tagged.py
dynamic_rest.tagged._TaggedOrderedDict
class _TaggedOrderedDict(TaggedDict, OrderedDict): pass
class _TaggedOrderedDict(TaggedDict, OrderedDict): pass
1
0
0
0
0
0
0
0
2
0
0
0
0
0
0
54
2
0
2
1
1
0
2
1
1
0
3
0
0
4,290
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/tagged.py
dynamic_rest.tagged.TaggedDict
class TaggedDict(object): """ Return object from `to_representation` for the `Serializer` class. Includes a reference to the `instance` and the `serializer` represented. """ def __init__(self, *args, **kwargs): self.serializer = kwargs.pop('serializer') self.instance = kwargs.pop('instance') self.embed = kwargs.pop('embed', False) self.pk_value = kwargs.pop('pk_value', None) if not isinstance(self, dict): raise Exception( "TaggedDict constructed not as a dict" ) super(TaggedDict, self).__init__(*args, **kwargs) def copy(self): return tag_dict( self, serializer=self.serializer, instance=self.instance, embed=self.embed, pk_value=self.pk_value ) def __repr__(self): return dict.__repr__(self) def __reduce__(self): return (dict, (dict(self),))
class TaggedDict(object): ''' Return object from `to_representation` for the `Serializer` class. Includes a reference to the `instance` and the `serializer` represented. ''' def __init__(self, *args, **kwargs): pass def copy(self): pass def __repr__(self): pass def __reduce__(self): pass
5
1
6
0
6
0
1
0.17
1
3
0
2
4
4
4
4
32
5
23
9
18
4
15
9
10
2
1
1
5
4,291
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/prefetch.py
dynamic_rest.prefetch.SlowObject
class SlowObject(dict):
    def __init__(self, slow_object=None, *args, **kwargs):
        self.pk_field = kwargs.pop('pk_field', 'id')
        self.data = slow_object
        return super(SlowObject, self).__init__(slow_object.__dict__, *args)

    @property
    def pk(self):
        return self[self.pk_field]

    def __getitem__(self, value):
        if hasattr(self.data, str(value)):
            return getattr(self.data, str(value))

        # for the purpose of mapping serialized model + '_id' fields back to
        # internal models, we need to check if that pattern is present
        is_nested_obj = value.split('_')
        test_attr = '_'.join(is_nested_obj[:-1])
        attr_exists = hasattr(self.data, test_attr)
        if is_nested_obj[-1] == 'id' and attr_exists:
            return getattr(self.data, test_attr).id

        return None

    def __iter__(self):
        return iter([self.data])

    def __getattr__(self, value):
        # EAFP: assume the attribute exists on the wrapped object and let
        # AttributeError propagate if it does not
        return getattr(self.data, str(value))
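A hypothetical usage with plain stand-in objects (these classes are not from the source):

class Owner(object):
    def __init__(self, id):
        self.id = id

class Pet(object):
    def __init__(self, id, owner):
        self.id = id
        self.owner = owner

pet = SlowObject(Pet(1, Owner(7)))
assert pet.pk == 1           # resolved via the default 'id' pk_field
assert pet['owner_id'] == 7  # '<relation>_id' falls back to relation.id
assert pet['missing'] is None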
class SlowObject(dict): def __init__(self, slow_object=None, *args, **kwargs): pass @property def pk(self): pass def __getitem__(self, value): pass def __iter__(self): pass def __getattr__(self, value): pass
7
0
5
1
4
1
1
0.14
1
2
0
0
5
2
5
32
32
8
21
12
14
3
20
11
14
3
2
1
7
4,292
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/serializers.py
dynamic_rest.serializers.WithResourceKeyMixin
class WithResourceKeyMixin(object): def get_resource_key(self): """Return canonical resource key, usually the DB table name.""" model = self.get_model() if model: return get_model_table(model) else: return self.get_name()
class WithResourceKeyMixin(object): def get_resource_key(self): '''Return canonical resource key, usually the DB table name.''' pass
2
1
7
0
6
1
2
0.14
1
0
0
2
1
0
1
1
8
0
7
3
5
1
6
3
4
2
1
1
2
4,293
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/serializers.py
dynamic_rest.serializers.WithDynamicModelSerializerMixin
class WithDynamicModelSerializerMixin(WithDynamicSerializerMixin): """Adds DREST serializer methods specific to model-based serializers.""" @classmethod def get_model(cls): return getattr(cls.Meta, 'model', None) def get_id_fields(self): """ Called to return a list of fields consisting of, at minimum, the PK field name. The output of this method is used to construct a Prefetch object with a .only() queryset when this field is not being sideloaded but we need to return a list of IDs. """ model = self.get_model() out = [model._meta.pk.name] # get PK field name # If this is being called, it means it # is a many-relation to its parent. # Django wants the FK to the parent, # but since accurately inferring the FK # pointing back to the parent is less than trivial, # we will just pull all ID fields. # TODO: We also might need to return all non-nullable fields, # or else it is possible Django will issue another request. for field in model._meta.fields: if isinstance(field, models.ForeignKey): out.append(field.name + '_id') return out
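A sketch of how these id fields might feed a .only() queryset inside a Prefetch object, as the docstring describes; the serializer class and relation name are hypothetical:

from django.db.models import Prefetch

serializer = GroupSerializer()  # hypothetical DynamicModelSerializer subclass
id_only_queryset = serializer.get_model().objects.only(
    *serializer.get_id_fields()
)
prefetch = Prefetch('groups', queryset=id_only_queryset)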
class WithDynamicModelSerializerMixin(WithDynamicSerializerMixin): '''Adds DREST serializer methods specific to model-based serializers.''' @classmethod def get_model(cls): pass def get_id_fields(self): ''' Called to return a list of fields consisting of, at minimum, the PK field name. The output of this method is used to construct a Prefetch object with a .only() queryset when this field is not being sideloaded but we need to return a list of IDs. ''' pass
4
2
14
2
5
8
2
1.55
1
0
0
1
1
0
2
33
33
6
11
7
7
17
10
6
7
3
3
2
4
4,294
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/serializers.py
dynamic_rest.serializers.EphemeralObject
class EphemeralObject(object): """Object that initializes attributes from a dict.""" def __init__(self, values_dict): if 'pk' not in values_dict: raise Exception('"pk" key is required') self.__dict__.update(values_dict)
class EphemeralObject(object): '''Object that initializes attributes from a dict.''' def __init__(self, values_dict): pass
2
1
4
0
4
0
2
0.2
1
1
0
0
1
0
1
1
8
2
5
2
3
1
5
2
3
2
1
1
2
4,295
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/serializers.py
dynamic_rest.serializers.DynamicModelSerializer
class DynamicModelSerializer( WithDynamicModelSerializerMixin, serializers.ModelSerializer ): """DREST-compatible model-based serializer.""" pass
class DynamicModelSerializer( WithDynamicModelSerializerMixin, serializers.ModelSerializer ): '''DREST-compatible model-based serializer.''' pass
1
1
0
0
0
0
0
0.2
2
0
0
15
0
0
0
33
7
1
5
4
1
1
2
1
1
0
4
0
0
4,296
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/serializers.py
dynamic_rest.serializers.DynamicListSerializer
class DynamicListSerializer( CacheableFieldMixin, WithResourceKeyMixin, serializers.ListSerializer ): """Custom ListSerializer class. This implementation delegates DREST-specific methods to the child serializer and performs post-processing before returning the data. """ update_lookup_field = 'id' def __init__(self, *args, **kwargs): super(DynamicListSerializer, self).__init__(*args, **kwargs) self.child.parent = self def to_representation(self, data): iterable = data.all() if isinstance(data, models.Manager) else data return [self.child.to_representation(item) for item in iterable] def get_model(self): """Get the child's model.""" return self.child.get_model() def get_name(self): """Get the child's name.""" return self.child.get_name() def get_plural_name(self): """Get the child's plural name.""" return self.child.get_plural_name() def id_only(self): """Get the child's rendering mode.""" return self.child.id_only() @resettable_cached_property def data(self): """Get the data, after performing post-processing if necessary.""" data = super(DynamicListSerializer, self).data processed_data = ( ReturnDict(SideloadingProcessor(self, data).data, serializer=self) if self.child.envelope else ReturnList(data, serializer=self) ) processed_data = post_process(processed_data) return processed_data def update(self, queryset, validated_data): lookup_attr = getattr(self.child.Meta, 'update_lookup_field', 'id') lookup_objects = { str(entry.pop(lookup_attr)): entry for entry in validated_data } lookup_keys = lookup_objects.keys() if not all((bool(_) and not inspect.isclass(_) for _ in lookup_keys)): raise exceptions.ValidationError('Invalid lookup key value.') # Since this method is given a queryset which can have many # model instances, first find all objects to update # and only then update the models. try: objects_to_update = queryset.filter( **{'{}__in'.format(lookup_attr): lookup_keys} ) except Exception: raise exceptions.ValidationError( 'Invalid lookup keys: %s' % ', '.join(lookup_keys) ) if len(lookup_keys) != objects_to_update.count(): raise exceptions.ValidationError( 'Could not find all objects to update: {} != {}.'.format( len(lookup_keys), objects_to_update.count() ) ) updated_objects = [] for object_to_update in objects_to_update: lookup_key = getattr(object_to_update, lookup_attr) lookup_key = str(lookup_key) data = lookup_objects.get(lookup_key) # Use model serializer to actually update the model # in case that method is overwritten. updated_objects.append(self.child.update(object_to_update, data)) return updated_objects
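A plain-Python sketch of how update() groups the incoming payload by the lookup field before matching it against the queryset (the data values are illustrative):

validated_data = [
    {'id': 1, 'name': 'ann'},
    {'id': 2, 'name': 'bob'},
]
lookup_objects = {
    str(entry.pop('id')): entry for entry in validated_data
}
assert lookup_objects == {'1': {'name': 'ann'}, '2': {'name': 'bob'}}
# queryset.filter(id__in=['1', '2']) then yields the objects to update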
class DynamicListSerializer( CacheableFieldMixin, WithResourceKeyMixin, serializers.ListSerializer ): '''Custom ListSerializer class. This implementation delegates DREST-specific methods to the child serializer and performs post-processing before returning the data. ''' def __init__(self, *args, **kwargs): pass def to_representation(self, data): pass def get_model(self): '''Get the child's model.''' pass def get_name(self): '''Get the child's name.''' pass def get_plural_name(self): '''Get the child's plural name.''' pass def id_only(self): '''Get the child's rendering mode.''' pass @resettable_cached_property def data(self): '''Get the data, after performing post-processing if necessary.''' pass def update(self, queryset, validated_data): pass
10
6
9
1
7
1
2
0.25
3
5
1
0
8
0
8
11
91
17
59
26
45
15
40
21
31
5
2
1
14
4,297
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/serializers.py
dynamic_rest.serializers.DynamicEphemeralSerializer
class DynamicEphemeralSerializer( WithDynamicSerializerMixin, serializers.Serializer ): """DREST-compatible baseclass for non-model serializers.""" def to_representation(self, instance): """ Provides post processing. Sub-classes should implement their own to_representation method, but pass the resulting dict through this function to get tagging and field selection. Arguments: instance: Serialized dict, or object. If object, it will be serialized by the super class's to_representation() method. """ if not isinstance(instance, dict): data = super( DynamicEphemeralSerializer, self ).to_representation(instance) else: data = instance instance = EphemeralObject(data) if self.id_only(): return data else: return tag_dict(data, serializer=self, instance=instance)
class DynamicEphemeralSerializer( WithDynamicSerializerMixin, serializers.Serializer ): '''DREST-compatible baseclass for non-model serializers.''' def to_representation(self, instance): ''' Provides post processing. Sub-classes should implement their own to_representation method, but pass the resulting dict through this function to get tagging and field selection. Arguments: instance: Serialized dict, or object. If object, it will be serialized by the super class's to_representation() method. ''' pass
2
2
25
3
13
9
3
0.59
2
3
1
3
1
0
1
32
32
5
17
6
12
10
9
3
7
3
3
1
3
4,298
AltSchool/dynamic-rest
AltSchool_dynamic-rest/dynamic_rest/routers.py
dynamic_rest.routers.DynamicRouter
class DynamicRouter(DefaultRouter):
    routes = copy.deepcopy(DefaultRouter.routes)
    modify_list_route(routes)

    def __init__(self, *args, **kwargs):
        optional_trailing_slash = kwargs.pop('optional_trailing_slash', True)
        super(DynamicRouter, self).__init__(*args, **kwargs)
        if optional_trailing_slash:
            self.trailing_slash = '/?'

    def get_api_root_view(self, **kwargs):
        """Return API root view, using the global directory."""

        class API(views.APIView):
            _ignore_model_permissions = True

            def get(self, request, *args, **kwargs):
                directory_list = get_directory(request)
                result = OrderedDict()
                for group_name, url, endpoints, _ in directory_list:
                    if url:
                        result[group_name] = url
                    else:
                        group = OrderedDict()
                        for endpoint_name, url, _, _ in endpoints:
                            group[endpoint_name] = url
                        result[group_name] = group
                return Response(result)

        return API.as_view()

    def register(self, prefix, viewset, base_name=None):
        """Add any registered route into a global API directory.

        If the prefix includes a path separator,
        store the URL in the directory under the first path segment.
        Otherwise, store it as-is.

        For example, if there are two registered prefixes,
        'v1/users' and 'groups', `directory` will look like:

            {
                'v1': {
                    'users': {
                        '_url': 'users-list'
                        '_viewset': <class 'UserViewSet'>
                    },
                }
                'groups': {
                    '_url': 'groups-list'
                    '_viewset': <class 'GroupViewSet'>
                }
            }
        """
        if base_name is None:
            base_name = prefix

        super(DynamicRouter, self).register(prefix, viewset, base_name)

        prefix_parts = prefix.split('/')
        if len(prefix_parts) > 1:
            prefix = prefix_parts[0]
            endpoint = '/'.join(prefix_parts[1:])
        else:
            endpoint = prefix
            prefix = None

        if prefix and prefix not in directory:
            current = directory[prefix] = {}
        else:
            current = directory.get(prefix, directory)

        list_name = self.routes[0].name
        url_name = list_name.format(basename=base_name)
        if endpoint not in current:
            current[endpoint] = {}
        current[endpoint]['_url'] = url_name
        current[endpoint]['_viewset'] = viewset

    def register_resource(self, viewset, namespace=None):
        """
        Register a viewset that should be considered the canonical
        endpoint for a particular resource. In addition to generating
        and registering the route, it adds the route in a reverse map
        to allow DREST to build the canonical URL for a given resource.

        Arguments:
            viewset - viewset class, should have `serializer_class` attr.
            namespace - (optional) URL namespace, e.g. 'v3'.
        """
        # Try to extract resource name from viewset.
        try:
            serializer = viewset.serializer_class()
            resource_key = serializer.get_resource_key()
            resource_name = serializer.get_name()
            path_name = serializer.get_plural_name()
        except BaseException:
            import traceback
            traceback.print_exc()
            raise Exception(
                "Failed to extract resource name from viewset: '%s'."
                " It, or its serializer, may not be DREST-compatible." % (
                    viewset
                )
            )

        # Construct canonical path and register it.
        if namespace:
            namespace = namespace.rstrip('/') + '/'
        base_path = namespace or ''
        base_path = r'%s' % base_path + path_name
        self.register(base_path, viewset)

        # Make sure resource isn't already registered.
        if resource_key in resource_map:
            raise Exception(
                "The resource '%s' has already been mapped to '%s'."
                " Each resource can only be mapped to one canonical"
                " path. " % (
                    resource_key,
                    resource_map[resource_key]['path']
                )
            )

        # Register resource in reverse map.
resource_map[resource_key] = { 'path': base_path, 'viewset': viewset } # Make sure the resource name isn't registered, either # TODO: Think of a better way to clean this up, there's a lot of # duplicated effort here, between `resource_name` and `resource_key` # This resource name -> key mapping is currently only used by # the DynamicGenericRelationField if resource_name in resource_name_map: resource_key = resource_name_map[resource_name] raise Exception( "The resource name '%s' has already been mapped to '%s'." " A resource name can only be used once." % ( resource_name, resource_map[resource_key]['path'] ) ) # map the resource name to the resource key for easier lookup resource_name_map[resource_name] = resource_key @staticmethod def get_canonical_path(resource_key, pk=None): """ Return canonical resource path. Arguments: resource_key - Canonical resource key i.e. Serializer.get_resource_key(). pk - (Optional) Object's primary key for a single-resource URL. Returns: Absolute URL as string. """ if resource_key not in resource_map: # Note: Maybe raise? return None base_path = get_script_prefix() + resource_map[resource_key]['path'] if pk: return '%s/%s/' % (base_path, pk) else: return base_path @staticmethod def get_canonical_serializer( resource_key, model=None, instance=None, resource_name=None ): """ Return canonical serializer for a given resource name. Arguments: resource_key - Resource key, usually DB table for model-based resources, otherwise the plural name. model - (Optional) Model class to look up by. instance - (Optional) Model object instance. Returns: serializer class """ if model: resource_key = get_model_table(model) elif instance: resource_key = instance._meta.db_table elif resource_name: resource_key = resource_name_map[resource_name] if resource_key not in resource_map: return None return resource_map[resource_key]['viewset'].serializer_class def get_routes(self, viewset): """ DREST routes injection, overrides DRF's get_routes() method, which gets called for each registered viewset. """ routes = super(DynamicRouter, self).get_routes(viewset) routes += self.get_relation_routes(viewset) return routes def get_relation_routes(self, viewset): """ Generate routes to serve relational objects. This method will add a sub-URL for each relational field. e.g. A viewset for the following serializer: class UserSerializer(..): events = DynamicRelationField(EventSerializer, many=True) groups = DynamicRelationField(GroupSerializer, many=True) location = DynamicRelationField(LocationSerializer) will have the following URLs added: /users/<pk>/events/ /users/<pk>/groups/ /users/<pk>/location/ """ routes = [] if not hasattr(viewset, 'serializer_class'): return routes if not hasattr(viewset, 'list_related'): return routes serializer = viewset.serializer_class() fields = getattr(serializer, 'get_link_fields', lambda: [])() route_name = '{basename}-{methodnamehyphen}' if drf_version >= (3, 8, 0): route_compat_kwargs = {'detail': False} else: route_compat_kwargs = {} for field_name, field in six.iteritems(fields): methodname = 'list_related' url = ( r'^{prefix}/{lookup}/(?P<field_name>%s)' '{trailing_slash}$' % field_name ) routes.append(Route( url=url, mapping={'get': methodname}, name=replace_methodname(route_name, field_name), initkwargs={}, **route_compat_kwargs )) return routes
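A hypothetical registration sketch; the viewset names, namespace, and resource key below are invented for illustration:

router = DynamicRouter()
router.register_resource(UserViewSet, namespace='v1')  # canonical /v1/users
router.register('v1/teams', TeamViewSet)               # grouped under 'v1'

# later, a canonical URL can be rebuilt from the resource key alone,
# e.g. DynamicRouter.get_canonical_path('auth_user', pk=1) == '/v1/users/1/'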
class DynamicRouter(DefaultRouter):
    def __init__(self, *args, **kwargs):
        pass
    def get_api_root_view(self, **kwargs):
        '''Return API root view, using the global directory.'''
        pass
    class API(views.APIView):
        def get(self, request, *args, **kwargs):
            pass
    def register(self, prefix, viewset, base_name=None):
        '''Add any registered route into a global API directory.

        If the prefix includes a path separator,
        store the URL in the directory under the first path segment.
        Otherwise, store it as-is.

        For example, if there are two registered prefixes,
        'v1/users' and 'groups', `directory` will look like:

            {
                'v1': {
                    'users': {
                        '_url': 'users-list'
                        '_viewset': <class 'UserViewSet'>
                    },
                }
                'groups': {
                    '_url': 'groups-list'
                    '_viewset': <class 'GroupViewSet'>
                }
            }
        '''
        pass
    def register_resource(self, viewset, namespace=None):
        '''
        Register a viewset that should be considered the canonical
        endpoint for a particular resource. In addition to generating
        and registering the route, it adds the route in a reverse map
        to allow DREST to build the canonical URL for a given resource.

        Arguments:
            viewset - viewset class, should have `serializer_class` attr.
            namespace - (optional) URL namespace, e.g. 'v3'.
        '''
        pass
    @staticmethod
    def get_canonical_path(resource_key, pk=None):
        '''
        Return canonical resource path.

        Arguments:
            resource_key - Canonical resource key
                i.e. Serializer.get_resource_key().
            pk - (Optional) Object's primary key for a single-resource URL.

        Returns: Absolute URL as string.
        '''
        pass
    @staticmethod
    def get_canonical_serializer(
        resource_key,
        model=None,
        instance=None,
        resource_name=None
    ):
        '''
        Return canonical serializer for a given resource name.

        Arguments:
            resource_key - Resource key, usually DB table for model-based
                resources, otherwise the plural name.
            model - (Optional) Model class to look up by.
            instance - (Optional) Model object instance.

        Returns: serializer class
        '''
        pass
    def get_routes(self, viewset):
        '''
        DREST routes injection, overrides DRF's get_routes() method,
        which gets called for each registered viewset.
        '''
        pass
    def get_relation_routes(self, viewset):
        '''
        Generate routes to serve relational objects.

        This method will add a sub-URL for each relational field.

        e.g.
        A viewset for the following serializer:

            class UserSerializer(..):
                events = DynamicRelationField(EventSerializer, many=True)
                groups = DynamicRelationField(GroupSerializer, many=True)
                location = DynamicRelationField(LocationSerializer)

        will have the following URLs added:

            /users/<pk>/events/
            /users/<pk>/groups/
            /users/<pk>/location/
        '''
        pass
13
7
29
4
17
8
3
0.52
1
4
1
0
6
1
8
8
259
40
144
48
125
75
100
41
88
5
1
3
31
4,299
AltSchool/dynamic-rest
AltSchool_dynamic-rest/tests/viewsets.py
tests.viewsets.CarViewSet
class CarViewSet(DynamicModelViewSet): serializer_class = CarSerializer queryset = Car.objects.all()
class CarViewSet(DynamicModelViewSet): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
27
3
0
3
3
2
0
3
3
2
0
3
0
0