| prefix (string, 82–32.6k chars) | middle (string, 5–470 chars) | suffix (string, 0–81.2k chars) | file_path (string, 6–168 chars) | repo_name (string, 16–77 chars) | context (list, 5 items) | lang (string, 4 classes) | ground_truth (string, 5–470 chars) |
|---|---|---|---|---|---|---|---|
import { typeOf } from './internals'
import { JSONValue, PathParams, SearchParams } from './types'
/**
* @param url a string or URL to which the query parameters will be added
* @param searchParams the query parameters
* @returns the url with the query parameters appended, preserving the type of the given url
*/
function addQueryToURL(
url: string | URL,
searchParams?: SearchParams,
): string | URL {
if (!searchParams) return url
if (typeof url === 'string') {
const separator = url.includes('?') ? '&' : '?'
return `${url}${separator}${new URLSearchParams(searchParams)}`
}
if (url instanceof URL) {
// Iterate the URLSearchParams directly: Object.entries() would yield nothing,
// since URLSearchParams keeps its data in internal slots
for (const [key, value] of new URLSearchParams(searchParams)) {
url.searchParams.set(key, value)
}
}
return url
}
/**
* @param body the JSON-like body of the request
* @returns the body stringified when it is a JSON-like value (number, boolean, array, or object); strings and other BodyInit values such as Blob or ReadableStream are passed through unchanged
*/
function ensureStringBody<B extends JSONValue | BodyInit | null>(
body?: B,
): B extends JSONValue ? string : B {
if (typeof body === 'undefined') return body as never
if (typeof body === 'string') return body as never
return (
['number', 'boolean', 'array', 'object'].includes(typeOf(body))
? JSON.stringify(body)
: body
) as never
}
/**
* @param baseURL the base path to the API
* @returns a function that receives a path and an object of query parameters and returns a URL
*/
function makeGetApiURL<T extends string | URL>(baseURL: T) {
const base = baseURL instanceof URL ? baseURL.toString() : baseURL
return (path: string, searchParams?: SearchParams): T => {
// collapse duplicated slashes in the path while leaving the protocol's "//" intact
const url = `${base}/${path}`.replace(/([^https?:]\/)\/+/g, '$1')
return addQueryToURL(url, searchParams) as T
}
}
/**
* It merges multiple HeadersInit objects into a single Headers object
* @param entries Any number of HeadersInit objects
* @returns a new Headers object with the merged headers
*/
function mergeHeaders(
...entries: (
| HeadersInit
| [string, undefined][]
| Record<string, undefined>
)[]
) {
const result = new Map<string, string>()
for (const entry of entries) {
const headers = new Headers(entry as HeadersInit)
for (const [key, value] of headers.entries()) {
// Headers stringifies undefined values to 'undefined'; treat that as a
// request to remove the header from the merged result
if (value === 'undefined') {
result.delete(key)
} else {
result.set(key, value)
}
}
}
return new Headers(Array.from(result.entries()))
}
/**
*
* @param url the url string or URL object in which to replace the params
* @param params the params map to be replaced in the url
* @returns the url with the params replaced and with the same type as the given url
*/
function replaceURLParams<T extends string | URL>(
url: T,
|
params: PathParams<T>,
): T {
|
// TODO: use the URL Pattern API as soon as it has better browser support
if (!params) return url as T
let urlString = String(url)
Object.entries(params).forEach(([key, value]) => {
urlString = urlString.replace(new RegExp(`:${key}($|\/)`), `${value}$1`)
})
return (url instanceof URL ? new URL(urlString) : urlString) as T
}
export {
addQueryToURL,
ensureStringBody,
makeGetApiURL,
mergeHeaders,
replaceURLParams,
}
|
src/primitives.ts
|
gustavoguichard-make-service-e5a7bea
|
[
{
"filename": "src/api.ts",
"retrieved_chunk": " const headers = mergeHeaders(\n {\n 'content-type': 'application/json',\n },\n reqInit.headers ?? {},\n )\n const withParams = replaceURLParams<T>(url, reqInit.params ?? ({} as never))\n const fullURL = addQueryToURL(withParams, query)\n const body = ensureStringBody(reqInit.body)\n const enhancedReqInit = { ...reqInit, headers, body }",
"score": 0.8006328344345093
},
{
"filename": "src/types.ts",
"retrieved_chunk": "type TypedResponse = Omit<Response, 'json' | 'text'> & {\n json: TypedResponseJson\n text: TypedResponseText\n}\ntype PathParams<T> = T extends string\n ? ExtractPathParams<T> extends Record<string, unknown>\n ? ExtractPathParams<T>\n : Record<string, string>\n : Record<string, string>\ntype EnhancedRequestInit<T = string> = Omit<RequestInit, 'body' | 'method'> & {",
"score": 0.7915666103363037
},
{
"filename": "src/types.ts",
"retrieved_chunk": "type Prettify<T> = {\n [K in keyof T]: T[K]\n} & {}\ntype ExtractPathParams<T extends string> =\n T extends `${infer _}:${infer Param}/${infer Rest}`\n ? Prettify<Omit<{ [K in Param]: string } & ExtractPathParams<Rest>, ''>>\n : T extends `${infer _}:${infer Param}`\n ? { [K in Param]: string }\n : {}\nexport type {",
"score": 0.7890791296958923
},
{
"filename": "src/api.test.ts",
"retrieved_chunk": " })\n .transform(kebabToCamel),\n ),\n )\n type _R = Expect<\n Equal<typeof result, { foo: { deepNested: { kindOfValue: boolean } } }>\n >\n expect(result).toEqual({ foo: { deepNested: { kindOfValue: true } } })\n })\n it('should replace params in the URL', async () => {",
"score": 0.7685384750366211
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { HTTP_METHODS } from './constants'\nimport { getJson, getText } from './internals'\ntype Schema<T> = { parse: (d: unknown) => T }\ntype JSONValue =\n | string\n | number\n | boolean\n | { [x: string]: JSONValue }\n | Array<JSONValue>\ntype SearchParams = ConstructorParameters<typeof URLSearchParams>[0]",
"score": 0.7673795223236084
}
] |
typescript
|
params: PathParams<T>,
): T {
|
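For orientation, a minimal sketch of how the primitives in this row compose, mirroring the call order in the `src/api.ts` context chunk above; the base URL, path params, and header values are hypothetical:

```typescript
import { addQueryToURL, mergeHeaders, replaceURLParams } from './primitives'

// ":id" is substituted first; the query string is appended afterwards, since
// replaceURLParams only matches ":param" followed by "/" or end-of-string.
const withParams = replaceURLParams('https://api.example.com/users/:id', { id: '42' })
const fullURL = addQueryToURL(withParams, { page: '2' })
// "https://api.example.com/users/42?page=2"

// Setting a header to undefined removes it from the merged result.
const headers = mergeHeaders(
  { 'content-type': 'application/json', authorization: 'Bearer abc' },
  { authorization: undefined },
)
```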
import { camelCase } from 'lodash';
import { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import {
type DriverType,
type PreparedStoredProcedureParameter,
ParameterMode,
type StoredProcedureSchema,
type StoredProcedureParameter,
type ILogger,
type InputParameters,
} from '../types';
import { mapDbTypeToDriverType, replacer } from '../utils';
import { logExecutionBegin, logPerformance, logSafely } from '../logging';
import {
type StoredProcedureCacheManager,
type StoredProcedureMetadataManager,
} from '../stored-procedure';
import { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';
import { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';
/**
* StoredProcedureManager provides methods to interact
* with a Microsoft SQL Server database for managing stored procedures.
*/
export class StoredProcedureManager {
/**
* Creates a new instance of StoredProcedureManager.
*/
constructor(
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager,
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,
) {}
/**
* Executes a stored procedure with the provided input parameters, and returns the result.
* @template T - The type of records in the result sets.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {InputParameters} input - The input parameters for the stored procedure.
* @param {Request} request - The request to execute the stored procedure.
* @param {ILogger} logger - The logger to use for logging.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedure<T>(
storedProcedureName: string,
input: InputParameters,
request: Request,
logger: ILogger,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
let startTime = performance.now();
let schema = (await this._storedProcedureCacheManager.tryGetFromCache(storedProcedureName)) as
| IResult<StoredProcedureSchema>
| undefined;
if (schema === undefined) {
logSafely(
logger,
'info',
// Yellow
`\x1b[33mCache miss occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
schema = await this._storedProcedureMetadataManager.getStoredProcedureParameterSchema(
storedProcedureName,
logger,
);
await
|
this._storedProcedureCacheManager.addToCache(storedProcedureName, schema);
|
} else {
logSafely(
logger,
'info',
// Green
`\x1b[32mCache hit occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
}
logPerformance(logger, 'getStoredProcedureParameterSchema', startTime);
startTime = performance.now();
const storedProcedureParameters =
this._storedProcedureMetadataManager.parseStoredProcedureParameters(
storedProcedureName,
schema,
);
logPerformance(logger, 'parseStoredProcedureParameters', startTime);
startTime = performance.now();
const preparedRequest = this.prepareStoredProcedureRequest(
storedProcedureParameters,
input,
request,
);
logPerformance(logger, 'prepareStoredProcedureRequest', startTime);
startTime = performance.now();
logExecutionBegin(
logger,
`Stored Procedure ${storedProcedureName} with parameters`,
preparedRequest.parameters,
// Green
'32m',
);
const result = await preparedRequest.execute(storedProcedureName);
startTime = performance.now();
const preparedResult = this.prepareStoredProcedureResult(result, info);
logPerformance(logger, 'prepareStoredProcedureResult', startTime);
return preparedResult;
}
private prepareParameters(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
): Map<string, PreparedStoredProcedureParameter> {
// We want to use the inferred DB Stored Procedure schema as the source of truth.
const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();
for (const spParameter of storedProcedureParameters) {
const { name, type, length, precision, scale, ...rest } = spParameter;
const parameterName = name.slice(1);
// Let's use the parameter name in lowercase as the lookup key.
preparedParameters.set(parameterName.toLowerCase(), {
name: parameterName,
type: mapDbTypeToDriverType({
type,
length,
precision,
scale,
}) as DriverType,
value: undefined,
...rest,
});
}
// Populate our input values into the request parameters.
const inputParameters = input as Record<string, unknown>;
for (const inputParameterKey in inputParameters) {
const preparedParameter = preparedParameters.get(inputParameterKey.toLowerCase());
if (preparedParameter != null) {
preparedParameter.value = inputParameters[inputParameterKey];
}
// We don't care about provided input parameters that are missing in the Stored Procedure definition.
}
return preparedParameters;
}
private getMissingRequiredParameters(
parameters: Map<string, PreparedStoredProcedureParameter>,
): PreparedStoredProcedureParameter[] {
// Check what required parameters are missing.
const missingRequiredParameters = [];
for (const parameter of parameters.values()) {
// If they have a default value they can be omitted from the request.
if (parameter.defaultValue === undefined && parameter.value === undefined) {
missingRequiredParameters.push(parameter);
}
}
return missingRequiredParameters;
}
private addParametersToRequest(
parameters: Map<string, PreparedStoredProcedureParameter>,
request: Request,
): Request {
const preparedRequest = request;
for (const parameter of parameters.values()) {
const { name, type, mode, value, defaultValue } = parameter;
if (defaultValue !== undefined && value === undefined) {
continue;
}
if (mode === ParameterMode.IN) {
preparedRequest.input(name, type, value);
} else if (mode === ParameterMode.INOUT) {
preparedRequest.output(name, type, value);
} else {
throw new Error(`Unknown parameter mode: ${mode}`);
}
}
return preparedRequest;
}
/**
* Prepares the stored procedure request.
* @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.
* @param {InputParameters} input - The input object.
* @param {Request} request - The request object.
* @returns A prepared request object.
*/
private prepareStoredProcedureRequest(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
request: Request,
): Request {
const parameters = this.prepareParameters(storedProcedureParameters, input);
const missingRequiredParameters = this.getMissingRequiredParameters(parameters);
const missingLength = missingRequiredParameters.length;
if (missingLength > 0) {
throw new Error(
`Missing ${missingLength} required parameters: ${missingRequiredParameters
.map((param) => JSON.stringify(param, replacer, 0))
.join(', ')}.`,
);
}
const preparedRequest = this.addParametersToRequest(parameters, request);
return preparedRequest;
}
/**
* Maps the keys of an object based on the provided mapping.
* @template T - The type of the original object.
* @param {T} obj - The object whose keys need to be mapped.
* @param {Record<string, string>} mapping - A dictionary containing the mapping of the original keys to the new keys.
* @returns {T} A new object with the keys mapped according to the provided mapping.
*/
private mapKeysWithMapping<T extends Record<string, unknown>>(
obj: T,
mapping: Record<string, string>,
): T {
const result: Record<string, unknown> = {};
for (const key in obj) {
const mappedKey = mapping[key.toLowerCase()] ?? camelCase(key);
result[mappedKey] = obj[key];
}
return result as T;
}
/**
* Prepares the stored procedure result into a GraphQL result object.
* @param {IProcedureResult} result - The stored procedure result.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns {IResolverProcedureResult} A prepared GraphQL result object.
*/
private prepareStoredProcedureResult<T extends Record<string, unknown>>(
result: IProcedureResult<T>,
info?: GraphQLResolveInfo,
): IResolverProcedureResult<T> {
const { resultSetFields, outputFields } =
info !== undefined
? {
resultSetFields: getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
}
: { resultSetFields: {}, outputFields: {} };
const resultSets = result.recordsets.map((recordset: IRecordSet<Record<string, unknown>>) => {
return recordset.map((record: Record<string, unknown>) =>
this.mapKeysWithMapping(record, resultSetFields),
);
});
const output = this.mapKeysWithMapping(result.output, outputFields);
const preparedResult = {
returnValue: result.returnValue,
resultSets: resultSets as T[][],
rowsAffected: result.rowsAffected,
...output,
};
return preparedResult;
}
}
|
src/lib/stored-procedure/stored-procedure-manager.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " 'NUMERIC_PRECISION as precision, ' +\n 'NUMERIC_SCALE as scale ' +\n 'FROM INFORMATION_SCHEMA.PARAMETERS ' +\n `WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';\n SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,\n );\n const recordSetLength = result.recordsets.length as number;\n if (recordSetLength < 1 || recordSetLength > 2) {\n throw new Error(\n `Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,",
"score": 0.8353489637374878
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " );\n }\n if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {\n throw new Error(\n `Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,\n );\n }\n return result;\n }, logger);\n }",
"score": 0.8282290697097778
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " request,\n logger,\n info,\n ),\n logger,\n );\n logExecutionEnd(logger, `Stored Procedure Query ${storedProcedureName}`, startTime);\n logSafely(logger, 'info', `------------------`);\n return result;\n }",
"score": 0.8268024921417236
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 0.8232195377349854
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " const parameterSchemaMap: Map<string, StoredProcedureParameter> =\n schemaResult.recordsets[0].reduce(\n (parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {\n parameterMap.set(item.name, item);\n return parameterMap;\n },\n new Map<string, StoredProcedureParameter>(),\n );\n const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;\n if (storedProcedureDefinition == null) {",
"score": 0.8128505945205688
}
] |
typescript
|
this._storedProcedureCacheManager.addToCache(storedProcedureName, schema);
|
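A standalone sketch of the key-mapping rule `mapKeysWithMapping` applies when reshaping recordsets: prefer the GraphQL selection-set name (looked up by lowercased key), otherwise camelCase the column name. The sample keys are hypothetical:

```typescript
import { camelCase } from 'lodash';

function mapKeys(record: Record<string, unknown>, mapping: Record<string, string>) {
  const result: Record<string, unknown> = {};
  for (const key in record) {
    // Use the mapped field name when the resolver selected one, else camelCase.
    result[mapping[key.toLowerCase()] ?? camelCase(key)] = record[key];
  }
  return result;
}

console.log(mapKeys({ USER_ID: 1, USER_NAME: 'ada' }, { user_id: 'id' }));
// { id: 1, userName: 'ada' }
```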
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
return {
type: NodeTypes.LiquidDropNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
function toAttributes(attributes: ConcreteAttributeNode[]) {
return cstToAST(attributes) as AttributeNode[];
}
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
return cstToAST([value])[0] as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new
|
ASTBuilder(cst[0].source);
|
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespace and linebreaks that went missing during parsing. We don't need to do this
// if the node is an attribute, since whitespace between attributes is not important to preserve.
// In fact, preserving it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
|
src/parser/2-cst-to-ast/index.ts
|
unshopable-liquidx-a101873
|
[
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " attributeName: textNode,\n attributeDoubleQuotedValue: 0,\n attributeSingleQuotedValue: 0,\n attributeUnquotedValue: 0,\n attributeDoubleQuotedTextNode: textNode,\n attributeSingleQuotedTextNode: textNode,\n attributeUnquotedTextNode: textNode,\n };\n const cst = toAST(matchResult, mapping) as ConcreteNode[];\n return cst;",
"score": 0.8676000833511353
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " | ConcreteAttributeSingleQuoted\n | ConcreteAttributeUnquoted\n | ConcreteAttributeEmpty;\nexport type ConcreteAttributeDoubleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;\nexport type ConcreteAttributeSingleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;\nexport type ConcreteAttributeUnquoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;\nexport type ConcreteAttributeEmpty = {",
"score": 0.8396831750869751
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " const childrenAttribute: AttributeNode = {\n type: NodeTypes.AttributeDoubleQuoted,\n locStart: 0,\n locEnd: 0,\n source: '',\n name: {\n type: NodeTypes.TextNode,\n locStart: 0,\n locEnd: 0,\n source: '',",
"score": 0.8366147875785828
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export default function sourceToCST(source: string): ConcreteNode[] {\n const matchResult = grammar.match(source);\n if (matchResult.failed()) {\n throw new CSTParsingError(matchResult);\n }\n const textNode = {\n type: ConcreteNodeTypes.TextNode,\n locStart,\n locEnd,\n value: function (this: Node) {",
"score": 0.8353428840637207
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " name: ConcreteTextNode;\n} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;\nexport type CST = ConcreteNode[];\nexport type TemplateMapping = {\n type: ConcreteNodeTypes;\n locStart: (node: Node[]) => number;\n locEnd: (node: Node[]) => number;\n source: string;\n [k: string]: string | number | boolean | object | null;\n};",
"score": 0.8344331979751587
}
] |
typescript
|
ASTBuilder(cst[0].source);
|
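To situate this row, a hedged usage sketch of the CST-to-AST pipeline; the input string is hypothetical and the exact node sequence depends on the grammar in `1-source-to-cst`:

```typescript
import sourceToAST from './parser/2-cst-to-ast';

// Plain text, a liquid drop, and a self-closing element.
const ast = sourceToAST('Hello {{ name }} <Badge label="new" />');
console.log(ast.map((node) => node.type));
// e.g. [ 'TextNode', 'LiquidDropNode', 'TextNode', 'ElementNode' ]
```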
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Retrieves the parameter schema and definition for a stored procedure from the database.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @param {ILogger} logger - The logger to use for logging.
* @returns A Promise that resolves to the raw parameter schema query result.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns An iterator over the parsed StoredProcedureParameter values.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter.
|
defaultValue = convertSqlValueToJsValue(defaultValue, type);
|
}
}
return parameterSchemaMap.values();
}
}
|
src/lib/stored-procedure/stored-procedure-metadata-manager.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.85175621509552
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 0.8313586711883545
},
{
"filename": "src/lib/utils/type-map.ts",
"retrieved_chunk": "};\nexport const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {\n if (value === 'NULL') {\n return null;\n }\n const lowerCaseSqlType = sqlType.toLowerCase();\n if (\n isType(lowerCaseSqlType, [\n 'varchar',\n 'nvarchar',",
"score": 0.8124374747276306
},
{
"filename": "src/lib/utils/type-map.ts",
"retrieved_chunk": "}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {\n const types: IndexableTypes = TYPES;\n const property = findPropertyCaseInsensitive(types, type);\n if (property !== null) {\n const typeFactory = types[property as TypesKey];\n if (isSqlTypeFactoryWithNoParams(typeFactory)) {\n return typeFactory();\n } else if (isSqlTypeFactoryWithLength(typeFactory)) {\n return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);\n } else if (isSqlTypeFactoryWithScale(typeFactory)) {",
"score": 0.8103339672088623
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " schema,\n );\n logPerformance(logger, 'parseStoredProcedureParameters', startTime);\n startTime = performance.now();\n const preparedRequest = this.prepareStoredProcedureRequest(\n storedProcedureParameters,\n input,\n request,\n );\n logPerformance(logger, 'prepareStoredProcedureRequest', startTime);",
"score": 0.8021944761276245
}
] |
typescript
|
defaultValue = convertSqlValueToJsValue(defaultValue, type);
|
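The parameter-definition regex in this row can be exercised in isolation; the parameter section below is a hypothetical example of the text `parameterSectionRegex` would capture:

```typescript
// Copied verbatim from StoredProcedureMetadataManager.parameterDefinitionRegex.
const parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;

const parameterSection = "@userId INT = NULL, @name VARCHAR(50) = 'guest'";
let match;
while ((match = parameterDefinitionRegex.exec(parameterSection)) !== null) {
  const [, name, type, defaultValue] = match;
  console.log(name, type, defaultValue);
}
// @userId INT NULL
// @name VARCHAR(50) 'guest'
```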
import { Node } from 'ohm-js';
import { toAST } from 'ohm-js/extras';
import { CSTParsingError } from '../errors';
import grammar from '../grammar';
export enum ConcreteNodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementOpeningTag = 'ElementOpeningTag',
ElementClosingTag = 'ElementClosingTag',
ElementSelfClosingTag = 'ElementSelfClosingTag',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type ConcreteNode =
| ConcreteTextNode
| ConcreteLiquidDropNode
| ConcreteElementOpeningTagNode
| ConcreteElementClosingTagNode
| ConcreteElementSelfClosingTagNode;
export type ConcreteBasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export type ConcreteTextNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;
export type ConcreteLiquidDropNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;
export type ConcreteElementOpeningTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;
export type ConcreteElementClosingTagNode = {
name: string;
} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;
export type ConcreteElementSelfClosingTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;
export type ConcreteAttributeNodeBase<T> = {
name: ConcreteTextNode;
value: ConcreteTextNode;
} & ConcreteBasicNode<T>;
export type ConcreteAttributeNode =
| ConcreteAttributeDoubleQuoted
| ConcreteAttributeSingleQuoted
| ConcreteAttributeUnquoted
| ConcreteAttributeEmpty;
export type ConcreteAttributeDoubleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;
export type ConcreteAttributeSingleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;
export type ConcreteAttributeUnquoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;
export type ConcreteAttributeEmpty = {
name: ConcreteTextNode;
} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;
export type CST = ConcreteNode[];
export type TemplateMapping = {
type: ConcreteNodeTypes;
locStart: (node: Node[]) => number;
locEnd: (node: Node[]) => number;
source: string;
[k: string]: string | number | boolean | object | null;
};
export type TopLevelFunctionMapping = (...nodes: Node[]) => any;
export type Mapping = {
[k: string]: number | TemplateMapping | TopLevelFunctionMapping;
};
function locStart(nodes: Node[]) {
return nodes[0].source.startIdx;
}
function locEnd(nodes: Node[]) {
return nodes[nodes.length - 1].source.endIdx;
}
export default function sourceToCST(source: string): ConcreteNode[] {
const
|
matchResult = grammar.match(source);
|
if (matchResult.failed()) {
throw new CSTParsingError(matchResult);
}
const textNode = {
type: ConcreteNodeTypes.TextNode,
locStart,
locEnd,
value: function (this: Node) {
return this.sourceString;
},
source,
};
const mapping: Mapping = {
Node: 0,
TextNode: textNode,
liquidDropNode: {
type: ConcreteNodeTypes.LiquidDropNode,
locStart,
locEnd,
source,
value: 2,
},
liquidDropValue: (node: Node) => node.sourceString.trimEnd(),
ElementNode: 0,
ElementOpeningTag: {
type: ConcreteNodeTypes.ElementOpeningTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
ElementClosingTag: {
type: ConcreteNodeTypes.ElementClosingTag,
locStart,
locEnd,
name: 1,
source,
},
ElementSelfClosingTag: {
type: ConcreteNodeTypes.ElementSelfClosingTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
AttributeDoubleQuoted: {
type: ConcreteNodeTypes.AttributeDoubleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeSingleQuoted: {
type: ConcreteNodeTypes.AttributeSingleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeUnquoted: {
type: ConcreteNodeTypes.AttributeUnquoted,
locStart,
locEnd,
source,
name: 0,
value: 2,
},
AttributeEmpty: {
type: ConcreteNodeTypes.AttributeEmpty,
locStart,
locEnd,
source,
name: 0,
},
attributeName: textNode,
attributeDoubleQuotedValue: 0,
attributeSingleQuotedValue: 0,
attributeUnquotedValue: 0,
attributeDoubleQuotedTextNode: textNode,
attributeSingleQuotedTextNode: textNode,
attributeUnquotedTextNode: textNode,
};
const cst = toAST(matchResult, mapping) as ConcreteNode[];
return cst;
}
|
src/parser/1-source-to-cst/index.ts
|
unshopable-liquidx-a101873
|
[
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " }\n return astBuilder.finish();\n}\nexport default function sourceToAST(source: string): LiquidXNode[] {\n const cst = sourceToCST(source);\n const ast = cstToAST(cst);\n return ast;\n}",
"score": 0.8660812377929688
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": "import sourceToCST, {\n ConcreteAttributeNode,\n ConcreteElementOpeningTagNode,\n ConcreteElementSelfClosingTagNode,\n ConcreteLiquidDropNode,\n ConcreteNode,\n ConcreteNodeTypes,\n ConcreteTextNode,\n} from '../1-source-to-cst';\nimport { UnknownConcreteNodeTypeError } from '../errors';",
"score": 0.843483567237854
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " source: string;\n constructor(source: string) {\n this.ast = [];\n this.cursor = [];\n this.source = source;\n }\n get current(): LiquidXNode[] {\n return deepGet<LiquidXNode[]>(this.cursor, this.ast);\n }\n get currentPosition(): number {",
"score": 0.8351695537567139
},
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": " const end = lc.fromIndex(Math.min(locEnd, source.length - 1));\n const location: SourceLocation = {\n start: {\n line: start?.line ?? source.length - 1,\n column: start?.col,\n },\n end: {\n line: end?.line ?? source.length,\n column: end?.col,\n },",
"score": 0.8325843214988708
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " for (let i = 0; i < cst.length; i += 1) {\n const node = cst[i];\n const prevNode = cst[i - 1];\n // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this\n // if the node is an attribute since whitespaces between attributes is not important to preserve.\n // In fact it would probably break the rendered output due to unexpected text nodes.\n // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).\n if (prevNode?.source && !isAttributeNode(node)) {\n const diff = node.locStart - prevNode.locEnd;\n if (diff > 0) {",
"score": 0.8312081098556519
}
] |
typescript
|
matchResult = grammar.match(source);
|
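A hedged usage sketch of `sourceToCST`; the input is hypothetical, and the offsets assume the grammar tokenizes it as one text node followed by one liquid drop:

```typescript
import sourceToCST from './parser/1-source-to-cst';

// toAST applies the mapping above to ohm's match result, so each CST node
// carries its type, locStart/locEnd offsets, and the original source.
const cst = sourceToCST('Hello {{ name }}');
console.log(cst.map((node) => `${node.type}@${node.locStart}-${node.locEnd}`));
// e.g. [ 'TextNode@0-6', 'LiquidDropNode@6-16' ]
```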
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
return {
type: NodeTypes.LiquidDropNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
function toAttributes(attributes: ConcreteAttributeNode[]) {
return cstToAST(attributes) as AttributeNode[];
}
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
return cstToAST([value])[0] as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new ASTBuilder(cst[0].source);
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespace and linebreaks that went missing during parsing. We don't need to do this
// if the node is an attribute, since whitespace between attributes is not important to preserve.
// In fact, preserving it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.
|
open(toElementNode(node));
|
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
|
src/parser/2-cst-to-ast/index.ts
|
unshopable-liquidx-a101873
|
[
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " node.locEnd,\n );\n }\n this.parent.locEnd = node.locEnd;\n this.cursor.pop();\n this.cursor.pop();\n }\n push(node: LiquidXNode) {\n this.current.push(node);\n }",
"score": 0.8390982151031494
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export type ConcreteLiquidDropNode = {\n value: string;\n} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;\nexport type ConcreteElementOpeningTagNode = {\n name: string;\n attributes: ConcreteAttributeNode[];\n} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;\nexport type ConcreteElementClosingTagNode = {\n name: string;\n} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;",
"score": 0.8293100595474243
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": "import { ElementNode, LiquidXNode, NodeTypes } from '.';\nimport {\n ConcreteElementClosingTagNode,\n ConcreteElementSelfClosingTagNode,\n} from '../1-source-to-cst';\nimport { ASTParsingError } from '../errors';\nimport { deepGet, dropLast } from './utils';\nexport default class ASTBuilder {\n ast: LiquidXNode[];\n cursor: (string | number)[];",
"score": 0.8277385830879211
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " value = JSON.stringify(renderText(node.value));\n } else {\n value = renderLiquidDrop(node.value);\n }\n output += `${name}: ${value}`;\n break;\n }\n case NodeTypes.AttributeEmpty: {\n const name = renderText(node.name);\n const value = true;",
"score": 0.8266528844833374
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " case NodeTypes.ElementNode: {\n output += renderElement(node, { withSource, isChildOfElementNode });\n break;\n }\n case NodeTypes.AttributeDoubleQuoted:\n case NodeTypes.AttributeSingleQuoted:\n case NodeTypes.AttributeUnquoted: {\n const name = renderText(node.name);\n let value = null;\n if (node.value.type === NodeTypes.TextNode) {",
"score": 0.82407546043396
}
] |
typescript
|
open(toElementNode(node));
|
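This row's middle completes `astBuilder.open(...)`; a sketch of the open/close pairing on hypothetical nested markup (assuming the grammar accepts it):

```typescript
import sourceToAST from './parser/2-cst-to-ast';

// The opening tag opens an ElementNode, children accumulate inside it, and
// the closing tag closes it; a self-closing tag opens and closes at once.
const ast = sourceToAST('<Card><Badge /></Card>');
// Expected shape: ElementNode "Card" -> children: [ ElementNode "Badge" ]
```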
import config from '../config'
import { ImportData, ImportGroups, LibraryRule } from '../types'
const getImportDepth = (path: string) => {
return path.split('/').length
}
const asc = (a: ImportData, b: ImportData) => {
const depthA = getImportDepth(a.path)
const depthB = getImportDepth(b.path)
if (depthA !== depthB) {
return depthA - depthB
} else {
return a.path.localeCompare(b.path)
}
}
const desc = (a: ImportData, b: ImportData) => {
const depthA = getImportDepth(a.path)
const depthB = getImportDepth(b.path)
if (depthA !== depthB) {
return depthB - depthA
} else {
return a.path.localeCompare(b.path)
}
}
const sortLibraries = (imports: ImportData[]) => {
let result: ImportData[] = []
const groups: Record<string, ImportData[]> = {}
for (const library of config.libs) {
groups[library.name] = []
for (let i = 0; i < imports.length; i++) {
const importData = imports[i]
if (
(library.rule === LibraryRule.EXACT && importData.path === library.name) ||
(library.rule === LibraryRule.STARTS && importData.path.startsWith(library.name)) ||
(library.rule === LibraryRule.INCLUDES && importData.path.includes(library.name))
) {
groups[library.name].push(importData)
imports.splice(i, 1)
i--
}
}
}
for (const groupKey in groups) {
groups[groupKey].sort(asc)
result = [...result, ...groups[groupKey]]
}
imports.sort(asc)
result = [...result, ...imports]
return destructuringSort(result)
}
const sortAliases = (imports: ImportData[]) => {
const sortedImports = imports.sort(asc)
return destructuringSort(sortedImports)
}
const sortRelatives = (imports: ImportData[]) => {
const outFolderImports: ImportData[] = []
const currentFolderImports: ImportData[] = []
for (const importData of imports) {
if (importData.path.startsWith('./')) {
currentFolderImports.push(importData)
} else {
outFolderImports.push(importData)
}
}
outFolderImports.sort(desc)
currentFolderImports.sort(desc)
return destructuringSort(outFolderImports.concat(currentFolderImports))
}
const destructuringSort = (imports: ImportData[]) => {
const result: ImportData[] = []
for (const importData of imports) {
const searchResult = importData.raw.match(/\{[\s\S]+?}/gm)
if (searchResult) {
const importElementsString = searchResult[0].replace(/[{}\s]/gm, '')
const importElements = importElementsString
.split(',')
.filter((importElement) => importElement)
importElements.sort(function (a, b) {
if (a.length === b.length) {
return a.localeCompare(b)
} else {
return a.length - b.length
}
})
result.push({
raw: importData.raw.replace(/\{[\s\S]+?}/gm, `{ ${importElements.join(',')} }`),
path: importData.path,
})
} else {
result.push(importData)
}
}
return result
}
|
export const sortImportGroups = (inputGroups: ImportGroups) => {
|
return {
libraries: sortLibraries(inputGroups.libraries),
aliases: sortAliases(inputGroups.aliases),
relatives: sortRelatives(inputGroups.relatives),
directRelatives: sortRelatives(inputGroups.directRelatives),
}
}
|
src/utils/sort-import-groups.ts
|
crmapache-prettier-plugin-sort-react-imports-a237c21
|
[
{
"filename": "src/utils/prepare-code.ts",
"retrieved_chunk": "import { ImportGroups } from '../types'\nexport const prepareCode = (importGroups: ImportGroups) => {\n let result = ''\n for (const importData of importGroups.libraries) {\n result += `${importData.raw}\\n`\n }\n result += '\\n'\n for (const importData of importGroups.aliases) {\n result += `${importData.raw}\\n`\n }",
"score": 0.8733301162719727
},
{
"filename": "src/utils/prepare-code.ts",
"retrieved_chunk": " result += '\\n'\n for (const importData of importGroups.relatives) {\n result += `${importData.raw}\\n`\n }\n if (importGroups.directRelatives.length > 0) {\n result += '\\n'\n for (const importData of importGroups.directRelatives) {\n result += `${importData.raw}\\n`\n }\n }",
"score": 0.8669439554214478
},
{
"filename": "src/utils/split-imports-to-groups.ts",
"retrieved_chunk": "export const splitImportsIntoGroups = (imports: Import[]): ImportGroups => {\n const libraries: ImportData[] = []\n const aliases: ImportData[] = []\n const relatives: ImportData[] = []\n const directRelatives: ImportData[] = []\n const userAliases = config.aliases\n for (const importString of imports) {\n const importSource = extractImportPath(importString)\n if (\n ((userAliases.length < 1 && importSource.startsWith('@')) ||",
"score": 0.8394937515258789
},
{
"filename": "src/utils/index.ts",
"retrieved_chunk": "export * from './split-imports-to-groups'\nexport * from './sort-import-groups'\nexport * from './prepare-code'",
"score": 0.8237107992172241
},
{
"filename": "src/utils/split-imports-to-groups.ts",
"retrieved_chunk": " matchToUserAlias(importSource, userAliases)) &&\n !isDireactAliasImport(importSource, importString)\n ) {\n aliases.push({ raw: importString, path: importSource })\n } else if (importSource.startsWith('.') && importString.includes('from')) {\n relatives.push({ raw: importString, path: importSource })\n } else if (importSource.startsWith('.') || isDireactAliasImport(importSource, importString)) {\n directRelatives.push({ raw: importString, path: importSource })\n } else {\n libraries.push({ raw: importString, path: importSource })",
"score": 0.8190192580223083
}
] |
typescript
|
export const sortImportGroups = (inputGroups: ImportGroups) => {
|
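A standalone sketch of the specifier ordering `destructuringSort` applies inside braces (length first, then alphabetical); the specifier names are hypothetical:

```typescript
const importElements = ['useState', 'memo', 'useEffect', 'FC'];
importElements.sort((a, b) =>
  a.length === b.length ? a.localeCompare(b) : a.length - b.length,
);
console.log(importElements); // [ 'FC', 'memo', 'useState', 'useEffect' ]
```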
import { Node } from 'ohm-js';
import { toAST } from 'ohm-js/extras';
import { CSTParsingError } from '../errors';
import grammar from '../grammar';
export enum ConcreteNodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementOpeningTag = 'ElementOpeningTag',
ElementClosingTag = 'ElementClosingTag',
ElementSelfClosingTag = 'ElementSelfClosingTag',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type ConcreteNode =
| ConcreteTextNode
| ConcreteLiquidDropNode
| ConcreteElementOpeningTagNode
| ConcreteElementClosingTagNode
| ConcreteElementSelfClosingTagNode;
export type ConcreteBasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export type ConcreteTextNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;
export type ConcreteLiquidDropNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;
export type ConcreteElementOpeningTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;
export type ConcreteElementClosingTagNode = {
name: string;
} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;
export type ConcreteElementSelfClosingTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;
export type ConcreteAttributeNodeBase<T> = {
name: ConcreteTextNode;
value: ConcreteTextNode;
} & ConcreteBasicNode<T>;
export type ConcreteAttributeNode =
| ConcreteAttributeDoubleQuoted
| ConcreteAttributeSingleQuoted
| ConcreteAttributeUnquoted
| ConcreteAttributeEmpty;
export type ConcreteAttributeDoubleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;
export type ConcreteAttributeSingleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;
export type ConcreteAttributeUnquoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;
export type ConcreteAttributeEmpty = {
name: ConcreteTextNode;
} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;
export type CST = ConcreteNode[];
export type TemplateMapping = {
type: ConcreteNodeTypes;
locStart: (node: Node[]) => number;
locEnd: (node: Node[]) => number;
source: string;
[k: string]: string | number | boolean | object | null;
};
export type TopLevelFunctionMapping = (...nodes: Node[]) => any;
export type Mapping = {
[k: string]: number | TemplateMapping | TopLevelFunctionMapping;
};
function locStart(nodes: Node[]) {
return nodes[0].source.startIdx;
}
function locEnd(nodes: Node[]) {
return nodes[nodes.length - 1].source.endIdx;
}
export default function sourceToCST(source: string): ConcreteNode[] {
const matchResult = grammar.match(source);
if (matchResult.failed()) {
throw
|
new CSTParsingError(matchResult);
|
}
const textNode = {
type: ConcreteNodeTypes.TextNode,
locStart,
locEnd,
value: function (this: Node) {
return this.sourceString;
},
source,
};
const mapping: Mapping = {
Node: 0,
TextNode: textNode,
liquidDropNode: {
type: ConcreteNodeTypes.LiquidDropNode,
locStart,
locEnd,
source,
value: 2,
},
liquidDropValue: (node: Node) => node.sourceString.trimEnd(),
ElementNode: 0,
ElementOpeningTag: {
type: ConcreteNodeTypes.ElementOpeningTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
ElementClosingTag: {
type: ConcreteNodeTypes.ElementClosingTag,
locStart,
locEnd,
name: 1,
source,
},
ElementSelfClosingTag: {
type: ConcreteNodeTypes.ElementSelfClosingTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
AttributeDoubleQuoted: {
type: ConcreteNodeTypes.AttributeDoubleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeSingleQuoted: {
type: ConcreteNodeTypes.AttributeSingleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeUnquoted: {
type: ConcreteNodeTypes.AttributeUnquoted,
locStart,
locEnd,
source,
name: 0,
value: 2,
},
AttributeEmpty: {
type: ConcreteNodeTypes.AttributeEmpty,
locStart,
locEnd,
source,
name: 0,
},
attributeName: textNode,
attributeDoubleQuotedValue: 0,
attributeSingleQuotedValue: 0,
attributeUnquotedValue: 0,
attributeDoubleQuotedTextNode: textNode,
attributeSingleQuotedTextNode: textNode,
attributeUnquotedTextNode: textNode,
};
const cst = toAST(matchResult, mapping) as ConcreteNode[];
return cst;
}
|
src/parser/1-source-to-cst/index.ts
|
unshopable-liquidx-a101873
|
[
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " }\n return astBuilder.finish();\n}\nexport default function sourceToAST(source: string): LiquidXNode[] {\n const cst = sourceToCST(source);\n const ast = cstToAST(cst);\n return ast;\n}",
"score": 0.8672729134559631
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": "import sourceToCST, {\n ConcreteAttributeNode,\n ConcreteElementOpeningTagNode,\n ConcreteElementSelfClosingTagNode,\n ConcreteLiquidDropNode,\n ConcreteNode,\n ConcreteNodeTypes,\n ConcreteTextNode,\n} from '../1-source-to-cst';\nimport { UnknownConcreteNodeTypeError } from '../errors';",
"score": 0.849249005317688
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " source: string;\n constructor(source: string) {\n this.ast = [];\n this.cursor = [];\n this.source = source;\n }\n get current(): LiquidXNode[] {\n return deepGet<LiquidXNode[]>(this.cursor, this.ast);\n }\n get currentPosition(): number {",
"score": 0.837570309638977
},
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": "}\nexport class CSTParsingError extends LoggableError {\n constructor(matchResult: ohm.MatchResult) {\n super({ result: matchResult.message ?? '' });\n this.name = 'CSTParsingError';\n }\n}\nexport class UnknownConcreteNodeTypeError extends LoggableError {\n constructor(message: string, source: string, locStart: number, locEnd: number) {\n super({ result: undefined, message, source, locStart, locEnd });",
"score": 0.8372110724449158
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " for (let i = 0; i < cst.length; i += 1) {\n const node = cst[i];\n const prevNode = cst[i - 1];\n // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this\n // if the node is an attribute since whitespaces between attributes is not important to preserve.\n // In fact it would probably break the rendered output due to unexpected text nodes.\n // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).\n if (prevNode?.source && !isAttributeNode(node)) {\n const diff = node.locStart - prevNode.locEnd;\n if (diff > 0) {",
"score": 0.8339542746543884
}
] |
typescript
|
new CSTParsingError(matchResult);
|
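The mapping object in the row above is consumed by ohm-js's toAST helper: a number selects a child node by index, while a template object builds a typed CST node whose locStart/locEnd are computed from the matched children. A minimal usage sketch of the resulting sourceToCST entry point follows; the sample markup is hypothetical and assumes the LiquidX grammar accepts a self-closing element with a double-quoted attribute.

// Hypothetical usage sketch of sourceToCST. The sample markup assumes the
// LiquidX grammar accepts self-closing elements with double-quoted attributes.
import sourceToCST, { ConcreteNodeTypes } from './src/parser/1-source-to-cst';

const cst = sourceToCST('<Banner title="Hello" />');

// Every concrete node carries its type plus locStart/locEnd offsets into the source.
for (const node of cst) {
  if (node.type === ConcreteNodeTypes.ElementSelfClosingTag) {
    console.log(node.name, node.attributes.length); // e.g. 'Banner', 1
  }
}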
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
return {
type: NodeTypes.LiquidDropNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
function toAttributes(attributes: ConcreteAttributeNode[]) {
return cstToAST(attributes) as AttributeNode[];
}
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
return cstToAST([value])[0] as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new ASTBuilder(cst[0].source);
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
    // Add whitespace and line breaks that went missing after parsing. We don't need to do this
    // if the node is an attribute, since whitespace between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
|
return astBuilder.finish();
|
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
|
src/parser/2-cst-to-ast/index.ts
|
unshopable-liquidx-a101873
|
[
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": " this.name = 'UnknownConcreteNodeTypeError';\n }\n}\nexport class ASTParsingError extends LoggableError {\n constructor(message: string, source: string, locStart: number, locEnd: number) {\n super({ result: undefined, message, source, locStart, locEnd });\n this.name = 'ASTParsingError';\n }\n}",
"score": 0.854373037815094
},
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": "}\nexport class CSTParsingError extends LoggableError {\n constructor(matchResult: ohm.MatchResult) {\n super({ result: matchResult.message ?? '' });\n this.name = 'CSTParsingError';\n }\n}\nexport class UnknownConcreteNodeTypeError extends LoggableError {\n constructor(message: string, source: string, locStart: number, locEnd: number) {\n super({ result: undefined, message, source, locStart, locEnd });",
"score": 0.8255723714828491
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " finish() {\n if (this.cursor.length > 0) {\n throw new ASTParsingError(\n `LiquidX element '${this.parent?.name}' has no corresponding closing tag.`,\n this.source,\n this.parent?.locStart ?? this.source.length - 1,\n this.parent?.locEnd ?? this.source.length,\n );\n }\n return this.ast;",
"score": 0.816443145275116
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export default function sourceToCST(source: string): ConcreteNode[] {\n const matchResult = grammar.match(source);\n if (matchResult.failed()) {\n throw new CSTParsingError(matchResult);\n }\n const textNode = {\n type: ConcreteNodeTypes.TextNode,\n locStart,\n locEnd,\n value: function (this: Node) {",
"score": 0.8162981867790222
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " | ConcreteElementSelfClosingTagNode;\nexport type ConcreteBasicNode<T> = {\n type: T;\n locStart: number;\n locEnd: number;\n source: string;\n};\nexport type ConcreteTextNode = {\n value: string;\n} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;",
"score": 0.8129236698150635
}
] |
typescript
|
return astBuilder.finish();
|
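One subtlety in the row above is the whitespace-reconstruction step inside cstToAST: because the grammar discards inter-node whitespace, the AST pass re-synthesizes text nodes from the gap between prevNode.locEnd and node.locStart. A standalone sketch of that idea, using simplified stand-in node shapes rather than the real ConcreteNode types:

// Standalone sketch of the gap-filling idea used in cstToAST. The node
// shapes are simplified stand-ins for the real ConcreteNode types.
type Span = { locStart: number; locEnd: number };
type GapOrSpan = Span | { text: string };

function fillGaps(source: string, nodes: Span[]): GapOrSpan[] {
  const out: GapOrSpan[] = [];
  for (let i = 0; i < nodes.length; i += 1) {
    const prev = nodes[i - 1];
    const node = nodes[i];
    // Re-synthesize the whitespace/text the grammar discarded between nodes.
    if (prev !== undefined && node.locStart > prev.locEnd) {
      out.push({ text: source.slice(prev.locEnd, node.locStart) });
    }
    out.push(node);
  }
  return out;
}

// '<a/> <b/>': the space between the two spans comes back as a text gap.
console.log(fillGaps('<a/> <b/>', [{ locStart: 0, locEnd: 4 }, { locStart: 5, locEnd: 9 }]));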
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
return {
type: NodeTypes.LiquidDropNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
function toAttributes(attributes: ConcreteAttributeNode[]) {
return cstToAST(attributes) as AttributeNode[];
}
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
return cstToAST([value])[0] as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new ASTBuilder(cst[0].source);
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
    // Add whitespace and line breaks that went missing after parsing. We don't need to do this
    // if the node is an attribute, since whitespace between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.
|
push(
toTextNode({
|
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
|
src/parser/2-cst-to-ast/index.ts
|
unshopable-liquidx-a101873
|
[
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " let output = '';\n const attributes = node.attributes;\n if (withSource && !isChildOfElementNode) {\n output += renderStartMarker();\n }\n if (node.children.length > 0) {\n const captureName = `${node.name}Children`;\n output += `{% capture ${captureName} %}`;\n output += renderAST(node.children, { withSource, isChildOfElementNode: true });\n output += '{% endcapture %}';",
"score": 0.8241041898727417
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " attributes.push(childrenAttribute);\n }\n const renderedAttributes = node.attributes.map((attribute) => renderAST([attribute]));\n const separator = ', ';\n const attributesString =\n renderedAttributes.length > 0 ? `${separator}${renderedAttributes.join(separator)}` : '';\n output += `{% render '${node.name}'${attributesString} %}`;\n if (withSource && !isChildOfElementNode) {\n output += renderEndMarker(node);\n }",
"score": 0.8238285779953003
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " { withSource = false, isChildOfElementNode = false } = {},\n): string {\n let output = '';\n for (let i = 0; i < ast.length; i += 1) {\n const node = ast[i];\n switch (node.type) {\n case NodeTypes.TextNode: {\n output += renderText(node);\n break;\n }",
"score": 0.8231301307678223
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export default function sourceToCST(source: string): ConcreteNode[] {\n const matchResult = grammar.match(source);\n if (matchResult.failed()) {\n throw new CSTParsingError(matchResult);\n }\n const textNode = {\n type: ConcreteNodeTypes.TextNode,\n locStart,\n locEnd,\n value: function (this: Node) {",
"score": 0.8142560720443726
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " const childrenAttribute: AttributeNode = {\n type: NodeTypes.AttributeDoubleQuoted,\n locStart: 0,\n locEnd: 0,\n source: '',\n name: {\n type: NodeTypes.TextNode,\n locStart: 0,\n locEnd: 0,\n source: '',",
"score": 0.8131064176559448
}
] |
typescript
|
push(
toTextNode({
|
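The switch statement in the row above leans on a small stack contract in ASTBuilder: push appends to the children of the currently open element, open descends into a new element, and close pops it again. A minimal sketch of that contract with simplified types; the real builder in ast-builder.ts also tracks source offsets and raises ASTParsingError on mismatched or unclosed tags:

// Minimal sketch of the ASTBuilder stack contract relied on above
// (simplified: the real builder in ast-builder.ts also records source
// offsets and throws ASTParsingError instead of Error).
type SketchNode = { name?: string; children?: SketchNode[] };

class SketchASTBuilder {
  ast: SketchNode[] = [];
  private stack: SketchNode[] = [];

  // Nodes are appended to the currently open element, or to the root list.
  private get current(): SketchNode[] {
    const top = this.stack[this.stack.length - 1];
    return top?.children ?? this.ast;
  }

  push(node: SketchNode): void {
    this.current.push(node);
  }

  // open() appends the element and descends so later pushes become children.
  open(node: SketchNode): void {
    node.children = node.children ?? [];
    this.push(node);
    this.stack.push(node);
  }

  close(name: string): void {
    const top = this.stack.pop();
    if (top === undefined || top.name !== name) {
      throw new Error(`Unexpected closing tag '${name}'`);
    }
  }

  // finish() fails if any element was never closed, mirroring the real builder.
  finish(): SketchNode[] {
    if (this.stack.length > 0) throw new Error('Unclosed element');
    return this.ast;
  }
}

// A self-closing tag maps to open() immediately followed by close().
const builder = new SketchASTBuilder();
builder.open({ name: 'Banner' });
builder.close('Banner');
console.log(builder.finish()); // [ { name: 'Banner', children: [] } ]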
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
   * Retrieves the parameter schema and definition for a stored procedure from the database.
   * @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
   * @returns A Promise that resolves to the stored procedure's parameter schema and definition.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
          'CHARACTER_MAXIMUM_LENGTH as length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
   * @returns An iterator over the parsed StoredProcedureParameter values.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
|
parameterMap.set(item.name, item);
|
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter.defaultValue = convertSqlValueToJsValue(defaultValue, type);
}
}
return parameterSchemaMap.values();
}
}
|
src/lib/stored-procedure/stored-procedure-metadata-manager.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/types/stored-procedure-schema.ts",
"retrieved_chunk": "import type { StoredProcedureParameter } from '.';\n/**\n * Represents the result of a stored procedure execution.\n */\nexport type StoredProcedureSchema = [\n StoredProcedureParameter,\n {\n storedProcedureDefinition: string;\n },\n];",
"score": 0.8719191551208496
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " const preparedResult = this.prepareStoredProcedureResult(result, info);\n logPerformance(logger, 'prepareStoredProcedureResult', startTime);\n return preparedResult;\n }\n private prepareParameters(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,\n input: InputParameters,\n ): Map<string, PreparedStoredProcedureParameter> {\n // We want to use the inferred DB Stored Procedure schema as the source of truth.\n const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();",
"score": 0.8570138216018677
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.846399188041687
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 0.820986807346344
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " schema,\n );\n logPerformance(logger, 'parseStoredProcedureParameters', startTime);\n startTime = performance.now();\n const preparedRequest = this.prepareStoredProcedureRequest(\n storedProcedureParameters,\n input,\n request,\n );\n logPerformance(logger, 'prepareStoredProcedureRequest', startTime);",
"score": 0.8119103908538818
}
] |
typescript
|
parameterMap.set(item.name, item);
|
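The three regexes above do the heavy lifting. As a quick sanity check, here is parameterDefinitionRegex applied to a hypothetical parameter section; note that, as written, it only matches parameters that declare a default value:

// Exercising parameterDefinitionRegex against a hypothetical parameter section.
// As written, the regex only matches parameters that declare a default (`= ...`).
const parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;

const parameterSection = "@UserId int = NULL, @Name nvarchar(50) = 'bob'";

let match;
while ((match = parameterDefinitionRegex.exec(parameterSection)) !== null) {
  const [, name, type, defaultValue] = match;
  console.log({ name, type, defaultValue });
  // { name: '@UserId', type: 'int', defaultValue: 'NULL' }
  // { name: '@Name', type: 'nvarchar(50)', defaultValue: "'bob'" }
}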
import { camelCase } from 'lodash';
import { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import {
type DriverType,
type PreparedStoredProcedureParameter,
ParameterMode,
type StoredProcedureSchema,
type StoredProcedureParameter,
type ILogger,
type InputParameters,
} from '../types';
import { mapDbTypeToDriverType, replacer } from '../utils';
import { logExecutionBegin, logPerformance, logSafely } from '../logging';
import {
type StoredProcedureCacheManager,
type StoredProcedureMetadataManager,
} from '../stored-procedure';
import { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';
import { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';
/**
* StoredProcedureManager provides methods to interact
* with a Microsoft SQL Server database for managing stored procedures.
*/
export class StoredProcedureManager {
/**
* Creates a new instance of StoredProcedureManager.
*/
constructor(
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager,
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,
) {}
/**
* Executes a stored procedure with the provided input parameters, and returns the result.
* @template TVal - The type of records in the result set.
* @template TRet - The type of the result object to be returned.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {Request} request - The request to execute the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @param {ILogger} logger - The logger to use for logging.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedure<T>(
storedProcedureName: string,
input: InputParameters,
request: Request,
logger: ILogger,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
let startTime = performance.now();
let schema = (await this._storedProcedureCacheManager.tryGetFromCache(storedProcedureName)) as
| IResult<StoredProcedureSchema>
| undefined;
if (schema === undefined) {
logSafely(
logger,
'info',
// Yellow
`\x1b[33mCache miss occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
schema = await this._storedProcedureMetadataManager.getStoredProcedureParameterSchema(
storedProcedureName,
logger,
);
await this._storedProcedureCacheManager.addToCache(storedProcedureName, schema);
} else {
logSafely(
logger,
'info',
// Green
`\x1b[32mCache hit occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
}
logPerformance(logger, 'getStoredProcedureParameterSchema', startTime);
startTime = performance.now();
const storedProcedureParameters =
this._storedProcedureMetadataManager.parseStoredProcedureParameters(
storedProcedureName,
schema,
);
logPerformance(logger, 'parseStoredProcedureParameters', startTime);
startTime = performance.now();
const preparedRequest = this.prepareStoredProcedureRequest(
storedProcedureParameters,
input,
request,
);
logPerformance(logger, 'prepareStoredProcedureRequest', startTime);
startTime = performance.now();
logExecutionBegin(
logger,
`Stored Procedure ${storedProcedureName} with parameters`,
preparedRequest.parameters,
// Green
'32m',
);
const result = await preparedRequest.execute(storedProcedureName);
startTime = performance.now();
const preparedResult = this.prepareStoredProcedureResult(result, info);
logPerformance(logger, 'prepareStoredProcedureResult', startTime);
return preparedResult;
}
private prepareParameters(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
): Map<string, PreparedStoredProcedureParameter> {
// We want to use the inferred DB Stored Procedure schema as the source of truth.
const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();
for (const spParameter of storedProcedureParameters) {
const { name, type, length, precision, scale, ...rest } = spParameter;
const parameterName = name.slice(1);
// Let's use the parameter name in lowercase as the lookup key.
preparedParameters.set(parameterName.toLowerCase(), {
name: parameterName,
type: mapDbTypeToDriverType({
type,
length,
precision,
scale,
}) as DriverType,
value: undefined,
...rest,
});
}
// Populate our input values into the request parameters.
const inputParameters = input as Record<string, unknown>;
for (const inputParameterKey in inputParameters) {
const preparedParameter = preparedParameters.get(inputParameterKey.toLowerCase());
if (preparedParameter != null) {
preparedParameter.value = inputParameters[inputParameterKey];
}
// We don't care about provided input parameters that are missing in the Stored Procedure definition.
}
return preparedParameters;
}
private getMissingRequiredParameters(
parameters: Map<string, PreparedStoredProcedureParameter>,
): PreparedStoredProcedureParameter[] {
// Check what required parameters are missing.
const missingRequiredParameters = [];
for (const parameter of parameters.values()) {
      // If they have a default value, they can be omitted from the request.
if (parameter.defaultValue === undefined && parameter.value === undefined) {
missingRequiredParameters.push(parameter);
}
}
return missingRequiredParameters;
}
private addParametersToRequest(
parameters: Map<string, PreparedStoredProcedureParameter>,
request: Request,
): Request {
const preparedRequest = request;
for (const parameter of parameters.values()) {
const { name, type, mode, value, defaultValue } = parameter;
if (defaultValue !== undefined && value === undefined) {
continue;
}
const modeEnum = mode;
if (modeEnum === ParameterMode.IN) {
preparedRequest.input(name, type, value);
} else if (modeEnum === ParameterMode.INOUT) {
preparedRequest.output(name, type, value);
} else {
throw new Error(`Unknown parameter mode: ${mode}`);
}
}
return preparedRequest;
}
/**
* Prepares the stored procedure request.
* @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.
* @param {StoredProcedureInput} input - The input object.
* @param {Request} request - The request object.
* @returns A prepared request object.
*/
private prepareStoredProcedureRequest(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
request: Request,
): Request {
const parameters = this.prepareParameters(storedProcedureParameters, input);
const missingRequiredParameters = this.getMissingRequiredParameters(parameters);
const missingLength = missingRequiredParameters.length;
if (missingLength > 0) {
throw new Error(
`Missing ${missingLength} required parameters: ${missingRequiredParameters
.map((param) => JSON.stringify(param, replacer, 0))
.join(', ')}.`,
);
}
const preparedRequest = this.addParametersToRequest(parameters, request);
return preparedRequest;
}
/**
* Maps the keys of an object based on the provided mapping.
* @template T - The type of the original object.
* @param {T} obj - The object whose keys need to be mapped.
* @param {Record<string, string>} mapping - A dictionary containing the mapping of the original keys to the new keys.
* @returns {T} A new object with the keys mapped according to the provided mapping.
*/
private mapKeysWithMapping<T extends Record<string, unknown>>(
obj: T,
mapping: Record<string, string>,
): T {
const result: Record<string, unknown> = {};
for (const key in obj) {
const mappedKey = mapping[key.toLowerCase()] ?? camelCase(key);
result[mappedKey] = obj[key];
}
return result as T;
}
/**
* Prepares the stored procedure result into a GraphQL result object.
* @param {IProcedureResult} result - The stored procedure result.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns {IResolverProcedureResult} A prepared GraphQL result object.
*/
private prepareStoredProcedureResult<T extends Record<string, unknown>>(
result: IProcedureResult<T>,
info?: GraphQLResolveInfo,
): IResolverProcedureResult<T> {
const { resultSetFields, outputFields } =
info !== undefined
? {
resultSetFields:
|
getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
}
|
: { resultSetFields: {}, outputFields: {} };
const resultSets = result.recordsets.map((recordset: IRecordSet<Record<string, unknown>>) => {
return recordset.map((record: Record<string, unknown>) =>
this.mapKeysWithMapping(record, resultSetFields),
);
});
const output = this.mapKeysWithMapping(result.output, outputFields);
const preparedResult = {
returnValue: result.returnValue,
resultSets: resultSets as T[][],
rowsAffected: result.rowsAffected,
...output,
};
return preparedResult;
}
}
|
src/lib/stored-procedure/stored-procedure-manager.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/types/i-resolver-procedure-result.ts",
"retrieved_chunk": "/**\n * Represents a GraphQL resolver stored procedure result.\n * The format of the result is: a single resultSet property, followed by\n * any additional output properties that were returned by the stored procedure.\n */\nexport interface IResolverProcedureResult<T> extends Record<string, unknown> {\n returnValue?: number;\n resultSets?: T[][];\n rowsAffected?: number[];\n}",
"score": 0.8468179702758789
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " info?: GraphQLResolveInfo,\n ): Promise<IResolverProcedureResult<T>> {\n const startTime = performance.now();\n const logger = this._queryLogger;\n logExecutionBegin(logger, `Stored Procedure Query ${storedProcedureName} with inputs`, input);\n const result = await this._databaseExecutor.executeQueryRequest(\n async (request: Request): Promise<IResolverProcedureResult<T>> =>\n await this._storedProcedureManager.executeStoredProcedure<T>(\n storedProcedureName,\n input,",
"score": 0.8432685732841492
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " storedProcedureName: string,\n input: InputParameters,\n info?: GraphQLResolveInfo,\n ): Promise<IResolverProcedureResult<T>> {\n const startTime = performance.now();\n const logger = this._mutationLogger;\n logExecutionBegin(logger, `Stored Procedure Mutation ${storedProcedureName}`, input);\n const result = await this._databaseExecutor.executeMutationRequest(\n async (request: Request): Promise<IResolverProcedureResult<T>> =>\n await this._storedProcedureManager.executeStoredProcedure(",
"score": 0.8424292802810669
},
{
"filename": "src/lib/utils/graphql-helper.ts",
"retrieved_chunk": " info: GraphQLResolveInfo,\n nodeName: string,\n): Record<string, string> {\n const targetNode = findNodeByName(info, nodeName);\n // If the target node is not found, return an empty dictionary\n if (targetNode === undefined) {\n return {};\n }\n // If the target node is found, return its subfield names\n return getSelectionSetNames(targetNode);",
"score": 0.8288595676422119
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureQuery<T>(\n storedProcedureName: string,\n input: InputParameters,",
"score": 0.8188542723655701
}
] |
typescript
|
getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
}
|
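The key-mapping step above is what lets snake_case or PascalCase database columns line up with GraphQL's camelCase fields: explicit mappings derived from the resolve info win, and anything unmapped falls back to camelCase. A standalone sketch, with a tiny inline camelCase standing in for the lodash import:

// Standalone sketch of the key-mapping fallback (the inline camelCase is a
// simplified stand-in for lodash's).
const camelCase = (s: string): string =>
  s.replace(/[_\- ]+(\w)/g, (_, c: string) => c.toUpperCase())
   .replace(/^\w/, (c) => c.toLowerCase());

function mapKeysWithMapping(
  obj: Record<string, unknown>,
  mapping: Record<string, string>,
): Record<string, unknown> {
  const result: Record<string, unknown> = {};
  for (const key in obj) {
    // Explicit (lowercased) mappings win; otherwise fall back to camelCase.
    const mappedKey = mapping[key.toLowerCase()] ?? camelCase(key);
    result[mappedKey] = obj[key];
  }
  return result;
}

// 'USER_ID' has an explicit mapping; 'Display_Name' falls back to camelCase.
console.log(mapKeysWithMapping(
  { USER_ID: 1, Display_Name: 'Ada' },
  { user_id: 'userId' },
));
// { userId: 1, displayName: 'Ada' }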
import type { Request } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import { DevConsoleLogger, logExecutionBegin, logExecutionEnd, logSafely } from '../logging';
import { DatabaseExecutor } from '../executor';
import { ConnectionManager } from '../utils';
import {
StoredProcedureManager,
StoredProcedureCacheManager,
StoredProcedureMetadataManager,
} from '../stored-procedure';
import type { MSSQLOptions, ILogger, IResolverProcedureResult, InputParameters } from '../types';
/**
* A GraphQL DataSource backed by a Microsoft SQL Server database.
* Maintains separate caching for Query and Mutation operations.
* Maintains a global connection pool cache to reuse connections.
*/
export class MSSQLDataSource {
private readonly _queryOptions: MSSQLOptions;
private readonly _mutationOptions: MSSQLOptions;
private readonly _queryLogger: ILogger;
private readonly _mutationLogger: ILogger;
private readonly _connectionManager: ConnectionManager;
private readonly _databaseExecutor: DatabaseExecutor;
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager;
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager;
private readonly _storedProcedureManager: StoredProcedureManager;
/**
* Creates a new MSSQLDataSource with the given options.
* @param queryOptions The options for Query operations
* @param mutationOptions The options for Mutation operations
*/
constructor(
queryOptions: MSSQLOptions = MSSQLDataSource.defaultOptions,
mutationOptions: MSSQLOptions = MSSQLDataSource.defaultOptions,
) {
this._queryOptions = queryOptions;
this._mutationOptions = mutationOptions;
const defaultOptions = MSSQLDataSource.defaultOptions;
this._queryLogger =
queryOptions.logger !== undefined ? queryOptions.logger : (defaultOptions.logger as ILogger);
this._mutationLogger =
mutationOptions.logger !== undefined
? mutationOptions.logger
: (defaultOptions.logger as ILogger);
this._connectionManager = new ConnectionManager(
this._queryOptions.config,
this._mutationOptions.config,
);
this._databaseExecutor = new DatabaseExecutor(this._connectionManager);
this._storedProcedureMetadataManager = new StoredProcedureMetadataManager(
this._databaseExecutor,
);
this._storedProcedureCacheManager = new StoredProcedureCacheManager();
this._storedProcedureManager = new StoredProcedureManager(
this._storedProcedureCacheManager,
this._storedProcedureMetadataManager,
);
}
/**
* Executes a stored procedure for a Query operation with the provided input parameters, and returns the result.
* @template T - This type parameter represents the type of the value returned by the resolver procedure.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedureQuery<T>(
storedProcedureName: string,
|
input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
|
const startTime = performance.now();
const logger = this._queryLogger;
logExecutionBegin(logger, `Stored Procedure Query ${storedProcedureName} with inputs`, input);
const result = await this._databaseExecutor.executeQueryRequest(
async (request: Request): Promise<IResolverProcedureResult<T>> =>
await this._storedProcedureManager.executeStoredProcedure<T>(
storedProcedureName,
input,
request,
logger,
info,
),
logger,
);
logExecutionEnd(logger, `Stored Procedure Query ${storedProcedureName}`, startTime);
logSafely(logger, 'info', `------------------`);
return result;
}
/**
* Executes a stored procedure for a Mutation operation with the provided input parameters, and returns the result.
* @template T - This type parameter represents the type of the value returned by the resolver procedure.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedureMutation<T>(
storedProcedureName: string,
input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
const startTime = performance.now();
const logger = this._mutationLogger;
logExecutionBegin(logger, `Stored Procedure Mutation ${storedProcedureName}`, input);
const result = await this._databaseExecutor.executeMutationRequest(
async (request: Request): Promise<IResolverProcedureResult<T>> =>
await this._storedProcedureManager.executeStoredProcedure(
storedProcedureName,
input,
request,
logger,
info,
),
logger,
);
logExecutionEnd(logger, `Stored Procedure Mutation ${storedProcedureName}`, startTime);
return result;
}
/**
* Default options for the Query and Mutation global connection pool cache.
*/
private static get defaultOptions(): MSSQLOptions {
return {
config: {
user: '',
password: '',
server: '',
database: '',
},
logger: new DevConsoleLogger(),
};
}
}
|
src/lib/datasource/mssql-datasource.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " * procedure results to the correct schema field names.\n * @param {ILogger} logger - The logger to use for logging.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedure<T>(\n storedProcedureName: string,\n input: InputParameters,\n request: Request,\n logger: ILogger,\n info?: GraphQLResolveInfo,",
"score": 0.8947162628173828
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,\n ) {}\n /**\n * Executes a stored procedure with the provided input parameters, and returns the result.\n * @template TVal - The type of records in the result set.\n * @template TRet - The type of the result object to be returned.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {Request} request - The request to execute the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored",
"score": 0.8442472219467163
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " */\n private prepareStoredProcedureResult<T extends Record<string, unknown>>(\n result: IProcedureResult<T>,\n info?: GraphQLResolveInfo,\n ): IResolverProcedureResult<T> {\n const { resultSetFields, outputFields } =\n info !== undefined\n ? {\n resultSetFields: getNodeSelectionSetNames(info, 'resultSets'),\n outputFields: getFieldNamesExcludingNode(info, 'resultSets'),",
"score": 0.8343889117240906
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 0.8214578628540039
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " const preparedResult = this.prepareStoredProcedureResult(result, info);\n logPerformance(logger, 'prepareStoredProcedureResult', startTime);\n return preparedResult;\n }\n private prepareParameters(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,\n input: InputParameters,\n ): Map<string, PreparedStoredProcedureParameter> {\n // We want to use the inferred DB Stored Procedure schema as the source of truth.\n const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();",
"score": 0.8199496865272522
}
] |
typescript
|
input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
|
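Putting the data source to work from a GraphQL resolver looks roughly like the following. The config values, procedure name, and inputs are placeholders; only the constructor and executeStoredProcedureQuery shown above are assumed, and logger is assumed optional since the constructor falls back to DevConsoleLogger:

// Hypothetical wiring of MSSQLDataSource into a GraphQL resolver. The config
// values, procedure name, and inputs are placeholders.
import type { GraphQLResolveInfo } from 'graphql';
import { MSSQLDataSource } from './src/lib/datasource/mssql-datasource';

const dataSource = new MSSQLDataSource({
  config: { user: 'app', password: 'secret', server: 'localhost', database: 'app_db' },
});

// Forwarding `info` lets the data source map result keys to schema field names.
async function usersResolver(
  _parent: unknown,
  args: { userId: number },
  _context: unknown,
  info: GraphQLResolveInfo,
) {
  const result = await dataSource.executeStoredProcedureQuery('dbo.GetUsers', args, info);
  return result.resultSets?.[0] ?? [];
}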
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* Import the `stdin` and `stdout` streams from the Node.js `process` module; they are used to
read input from the user and write output to the console, respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
|
const question = sanitizeInput(userInput);
|
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
          await logChat(chatLogDirectory, question, response.text);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
|
src/index.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nconst helpCommand = createCommand(\n 'help',\n ['h', '?'],\n 'Show the list of available commands',\n (_args, output, commandHandler) =>\n new Promise<void>((resolve) => {\n output.write(chalk.blue('Usage:\\n'));\n output.write('Ask memorybot to write some marketing materials and press enter.\\n');",
"score": 0.8572618365287781
},
{
"filename": "src/commands/resetChatCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { resetBufferWindowMemory, resetMemoryVectorStore, setMemoryVectorStore } from '../lib/memoryManager.js';\nconst resetChatCommand = createCommand(\n 'reset',\n [],\n 'Resets the chat and starts a new conversation - This clears the memory vector store and the buffer window memory.',\n async (_args, output) => {\n output.write(chalk.yellow('\\nResetting the chat!\\n'));\n await resetMemoryVectorStore((newMemoryVectorStore) => {",
"score": 0.8568043112754822
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": " output.write(chalk.blue('\\nAvailable commands:\\n'));\n commandHandler.getCommands().forEach((command) => {\n const aliases = command.aliases.length > 0 ? ` (/${command.aliases.join(', /')})` : '';\n output.write(chalk.yellow(`/${command.name}${aliases}`));\n output.write(` - ${command.description}`);\n output.write('\\n');\n });\n resolve();\n })\n);",
"score": 0.8567022085189819
},
{
"filename": "src/commands.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport changeContextStoreCommand from './commands/switchContextStoreCommand.js';\nimport helpCommand from './commands/helpCommand.js';\nimport quitCommand from './commands/quitCommand.js';\nimport resetChatCommand from './commands/resetChatCommand.js';\nimport addDocumentCommand from './commands/addDocumentCommand.js';\nimport addURLCommand from './commands/addURLCommand.js';\nimport addYouTubeCommand from './commands/addYouTubeCommand.js';\nimport setContextConfigCommand from './commands/setContextConfigCommand.js';\nimport setMemoryConfigCommand from './commands/setMemoryConfigCommand.js';",
"score": 0.8424274325370789
},
{
"filename": "src/commands.ts",
"retrieved_chunk": " getCommands,\n async execute(commandName: string, args: string[], output: NodeJS.WriteStream) {\n const command = commands.find((cmd) => cmd.name === commandName || cmd.aliases.includes(commandName));\n if (command) {\n await command.execute(args, output, commandHandler);\n } else {\n output.write(chalk.red('Unknown command. Type /help to see the list of available commands.\\n'));\n }\n },\n };",
"score": 0.8326172828674316
}
] |
typescript
|
const question = sanitizeInput(userInput);
|
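The /command dispatch is the only structured part of the chat loop above: input starting with '/' is split into a command name and args and routed through the handler. A minimal sketch of that pattern, consistent with the execute/getCommands shape visible in the retrieved commands.ts chunk (simplified; the real commands also receive the command handler itself):

// Minimal sketch of the slash-command dispatch pattern from commands.ts.
type Command = {
  name: string;
  aliases: string[];
  description: string;
  execute: (args: string[], output: NodeJS.WriteStream) => Promise<void>;
};

function createSketchCommandHandler(commands: Command[]) {
  return {
    getCommands: () => commands,
    async execute(commandName: string, args: string[], output: NodeJS.WriteStream) {
      // Commands are matched by name or by any of their aliases.
      const command = commands.find(
        (cmd) => cmd.name === commandName || cmd.aliases.includes(commandName),
      );
      if (command) {
        await command.execute(args, output);
      } else {
        output.write('Unknown command. Type /help to see the list of available commands.\n');
      }
    },
  };
}

// '/reset now' is split into the command name 'reset' and the args ['now'],
// exactly as in the chat loop above.
const [commandName, ...commandArgs] = '/reset now'.slice(1).split(' ');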
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
   * Retrieves the parameter schema and definition for a stored procedure from the database.
   * @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
   * @returns A Promise that resolves to the stored procedure's parameter schema and definition.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
          'CHARACTER_MAXIMUM_LENGTH as length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
   * @returns An iterator over the parsed StoredProcedureParameter values.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
        `Stored procedure definition was empty after comment removal for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter
|
.defaultValue = convertSqlValueToJsValue(defaultValue, type);
|
}
}
return parameterSchemaMap.values();
}
}
|
src/lib/stored-procedure/stored-procedure-metadata-manager.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.8514732122421265
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 0.8324053287506104
},
{
"filename": "src/lib/utils/type-map.ts",
"retrieved_chunk": "}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {\n const types: IndexableTypes = TYPES;\n const property = findPropertyCaseInsensitive(types, type);\n if (property !== null) {\n const typeFactory = types[property as TypesKey];\n if (isSqlTypeFactoryWithNoParams(typeFactory)) {\n return typeFactory();\n } else if (isSqlTypeFactoryWithLength(typeFactory)) {\n return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);\n } else if (isSqlTypeFactoryWithScale(typeFactory)) {",
"score": 0.8138533234596252
},
{
"filename": "src/lib/utils/type-map.ts",
"retrieved_chunk": "};\nexport const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {\n if (value === 'NULL') {\n return null;\n }\n const lowerCaseSqlType = sqlType.toLowerCase();\n if (\n isType(lowerCaseSqlType, [\n 'varchar',\n 'nvarchar',",
"score": 0.8085981607437134
},
{
"filename": "src/lib/types/stored-procedure-schema.ts",
"retrieved_chunk": "import type { StoredProcedureParameter } from '.';\n/**\n * Represents the result of a stored procedure execution.\n */\nexport type StoredProcedureSchema = [\n StoredProcedureParameter,\n {\n storedProcedureDefinition: string;\n },\n];",
"score": 0.8055092692375183
}
] |
typescript
|
.defaultValue = convertSqlValueToJsValue(defaultValue, type);
|
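A minimal usage sketch of the two methods above. This is hedged: '[dbo].[MyProc]' is a placeholder procedure name, the import paths are assumed from the surrounding excerpt, and `manager`/`logger` are constructed elsewhere.

import type { IResult } from 'mssql';
import type { ILogger, StoredProcedureSchema } from '../types';
import { StoredProcedureMetadataManager } from '../stored-procedure'; // assumed export path

async function inspectProcedure(
  manager: StoredProcedureMetadataManager,
  logger: ILogger,
): Promise<void> {
  // Fetch the raw schema (parameter rows + procedure definition) from the database.
  const schema: IResult<StoredProcedureSchema> =
    await manager.getStoredProcedureParameterSchema('[dbo].[MyProc]', logger);
  // Merge parsed default values into the schema and iterate the parameters.
  for (const param of manager.parseStoredProcedureParameters('[dbo].[MyProc]', schema)) {
    console.log(param.name, param.type, param.defaultValue);
  }
}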
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
// The stdin and stdout streams from Node.js, used to read user input and write console output.
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
|
const memoryVectorStore = await getMemoryVectorStore();
|
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
|
src/index.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/commands/resetChatCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { resetBufferWindowMemory, resetMemoryVectorStore, setMemoryVectorStore } from '../lib/memoryManager.js';\nconst resetChatCommand = createCommand(\n 'reset',\n [],\n 'Resets the chat and starts a new conversation - This clears the memory vector store and the buffer window memory.',\n async (_args, output) => {\n output.write(chalk.yellow('\\nResetting the chat!\\n'));\n await resetMemoryVectorStore((newMemoryVectorStore) => {",
"score": 0.8547061681747437
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nconst helpCommand = createCommand(\n 'help',\n ['h', '?'],\n 'Show the list of available commands',\n (_args, output, commandHandler) =>\n new Promise<void>((resolve) => {\n output.write(chalk.blue('Usage:\\n'));\n output.write('Ask memorybot to write some marketing materials and press enter.\\n');",
"score": 0.8504498600959778
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": " output.write(chalk.blue('\\nAvailable commands:\\n'));\n commandHandler.getCommands().forEach((command) => {\n const aliases = command.aliases.length > 0 ? ` (/${command.aliases.join(', /')})` : '';\n output.write(chalk.yellow(`/${command.name}${aliases}`));\n output.write(` - ${command.description}`);\n output.write('\\n');\n });\n resolve();\n })\n);",
"score": 0.8467987775802612
},
{
"filename": "src/commands.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport changeContextStoreCommand from './commands/switchContextStoreCommand.js';\nimport helpCommand from './commands/helpCommand.js';\nimport quitCommand from './commands/quitCommand.js';\nimport resetChatCommand from './commands/resetChatCommand.js';\nimport addDocumentCommand from './commands/addDocumentCommand.js';\nimport addURLCommand from './commands/addURLCommand.js';\nimport addYouTubeCommand from './commands/addYouTubeCommand.js';\nimport setContextConfigCommand from './commands/setContextConfigCommand.js';\nimport setMemoryConfigCommand from './commands/setMemoryConfigCommand.js';",
"score": 0.8306122422218323
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.8282147645950317
}
] |
typescript
|
const memoryVectorStore = await getMemoryVectorStore();
|
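getRelevantContext is imported above but its body is not part of this excerpt; a plausible minimal sketch (an assumption, not the repo's implementation) built on HNSWLib's similaritySearch:

import type { HNSWLib } from 'langchain/vectorstores/hnswlib';

// Hypothetical sketch of getRelevantContext(store, query, k).
async function getRelevantContextSketch(store: HNSWLib, query: string, k: number): Promise<string> {
  // Retrieve the k nearest documents and join their contents into one context string.
  const docs = await store.similaritySearch(query, k);
  return docs.map((doc) => doc.pageContent).join('\n');
}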
import { HTTP_METHODS } from './constants'
import * as subject from './api'
import * as z from 'zod'
import { HTTPMethod } from './types'
import { kebabToCamel } from './transforms'
const reqMock = vi.fn()
function successfulFetch(response: string | Record<string, unknown>) {
return async (input: URL | RequestInfo, init?: RequestInit | undefined) => {
reqMock({
url: input,
headers: init?.headers,
method: init?.method,
body: init?.body,
})
return new Response(
typeof response === 'string' ? response : JSON.stringify(response),
)
}
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('enhancedFetch', () => {
describe('json', () => {
it('should be untyped by default', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.json())
type _R = Expect<Equal<typeof result, unknown>>
expect(result).toEqual({ foo: 'bar' })
})
it('should accept a type', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.json<{ foo: string }>())
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
})
it('should accept a parser', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.json(z.object({ foo: z.string() })))
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
})
})
describe('text', () => {
it('should be untyped by default', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.text())
type _R = Expect<Equal<typeof result, string>>
expect(result).toEqual(`{"foo":"bar"}`)
})
it('should accept a type', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch('john@doe.com'),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.text<`${string}@${string}.${string}`>())
type _R = Expect<Equal<typeof result, `${string}@${string}.${string}`>>
expect(result).toEqual('john@doe.com')
})
it('should accept a parser', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch('john@doe.com'),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.text(z.string().email()))
type _R = Expect<Equal<typeof result, string>>
expect(result).toEqual('john@doe.com')
})
})
it('should accept a schema that transforms the response', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: { 'deep-nested': { 'kind-of-value': true } } }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) =>
r.json(
z
.object({
foo: z.object({
'deep-nested': z.object({ 'kind-of-value': z.boolean() }),
}),
})
.transform(kebabToCamel),
),
)
type _R = Expect<
Equal<typeof result, { foo: { deepNested: { kindOfValue: boolean } } }>
>
expect(result).toEqual({ foo: { deepNested: { kindOfValue: true } } })
})
it('should replace params in the URL', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch(
'https://example.com/api/users/:user/page/:page',
{
params: {
user: '1',
page: '2',
// @ts-expect-error
foo: 'bar',
},
},
)
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users/1/page/2',
headers: new Headers({
'content-type': 'application/json',
}),
})
})
it('should accept a requestInit and a query', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch('https://example.com/api/users', {
headers: { Authorization: 'Bearer 123' },
query: { admin: 'true' },
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users?admin=true',
headers: new Headers({
authorization: 'Bearer 123',
'content-type': 'application/json',
}),
})
})
it('should accept a stringified body', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch('https://example.com/api/users', {
body: JSON.stringify({ id: 1, name: { first: 'John', last: 'Doe' } }),
method: 'POST',
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
body: `{"id":1,"name":{"first":"John","last":"Doe"}}`,
})
})
it('should stringify the body', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch('https://example.com/api/users', {
body: { id: 1, name: { first: 'John', last: 'Doe' } },
method: 'POST',
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
body: `{"id":1,"name":{"first":"John","last":"Doe"}}`,
})
})
it('should accept a trace function for debugging purposes', async () => {
const trace = vi.fn()
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch('https://example.com/api/users', {
body: { id: 1, name: { first: 'John', last: 'Doe' } },
query: { admin: 'true' },
trace,
method: 'POST',
})
expect(trace).toHaveBeenCalledWith(
'https://example.com/api/users?admin=true',
{
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
body: `{"id":1,"name":{"first":"John","last":"Doe"}}`,
},
)
})
})
describe('makeFetcher', () => {
it('should return a applied enhancedFetch', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const service = subject.makeFetcher('https://example.com/api')
const result = await service('/users', { method: 'post' }).then((r) =>
r.json(z.object({ foo: z.string() })),
)
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'post',
})
})
it('should add headers to the request', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher('https://example.com/api', {
Authorization: 'Bearer 123',
})
await fetcher('/users')
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({
authorization: 'Bearer 123',
'content-type': 'application/json',
}),
})
})
it('should accept a typed params object', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher('https://example.com/api')
await fetcher('/users/:id', {
params: {
id: '1',
// @ts-expect-error
foo: 'bar',
},
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users/1',
headers: new Headers({ 'content-type': 'application/json' }),
})
})
it('should accept a function for dynamic headers', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher('https://example.com/api', () => ({
Authorization: 'Bearer 123',
}))
await fetcher('/users')
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({
authorization: 'Bearer 123',
'content-type': 'application/json',
}),
})
})
it('should accept an async function for dynamic headers', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher(
'https://example.com/api',
async () => ({
Authorization: 'Bearer 123',
}),
)
await fetcher('/users')
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({
authorization: 'Bearer 123',
'content-type': 'application/json',
}),
})
})
it('should accept a query, trace, and JSON-like body', async () => {
const trace = vi.fn()
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher('https://example.com/api')
await fetcher('/users', {
method: 'POST',
body: { id: 1, name: { first: 'John', last: 'Doe' } },
query: { admin: 'true' },
trace,
})
expect(trace).toHaveBeenCalledWith(
'https://example.com/api/users?admin=true',
{
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
body: `{"id":1,"name":{"first":"John","last":"Doe"}}`,
},
)
})
})
describe('makeService', () => {
it('should return an object with http methods', () => {
const service = subject.makeService('https://example.com/api')
for (const method of HTTP_METHODS) {
expect(
typeof service[method.toLocaleLowerCase()
|
as Lowercase<HTTPMethod>],
).toBe('function')
}
|
})
it('should return an API with enhancedFetch', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const service = subject.makeService('https://example.com/api')
const result = await service
.post('/users')
.then((r) => r.json(z.object({ foo: z.string() })))
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
})
})
it('should accept a typed params object', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const service = subject.makeService('https://example.com/api')
await service.get('/users/:id', {
params: {
id: '1',
// @ts-expect-error
foo: 'bar',
},
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users/1',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'GET',
})
})
})
describe('typedResponse', () => {
it('should return unknown by default when turning into a JSON', async () => {
const result = await subject.typedResponse(new Response('1')).json()
type _R = Expect<Equal<typeof result, unknown>>
expect(result).toEqual(1)
})
it('should accept a type for the JSON method', async () => {
const result = await subject
.typedResponse(new Response(`{"foo":"bar"}`))
.json<{ foo: string }>()
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
})
it('should accept a parser for the JSON method', async () => {
const result = await subject
.typedResponse(new Response(`{"foo":"bar"}`))
.json(z.object({ foo: z.string() }))
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
})
})
|
src/api.test.ts
|
gustavoguichard-make-service-e5a7bea
|
[
{
"filename": "src/primitives.test.ts",
"retrieved_chunk": " it('should return a URL which is baseURL and path joined', () => {\n expect(subject.makeGetApiURL('https://example.com/api')('/users')).toBe(\n 'https://example.com/api/users',\n )\n })\n it('should accept an object-like queryString and return it joined to the URL', () => {\n const getApiURL = subject.makeGetApiURL('https://example.com/api')\n expect(getApiURL('/users', { id: '1' })).toBe(\n 'https://example.com/api/users?id=1',\n )",
"score": 0.8384658694267273
},
{
"filename": "src/api.ts",
"retrieved_chunk": " ReturnType<typeof appliedService>\n >\n for (const method of HTTP_METHODS) {\n const lowerMethod = method.toLowerCase() as Lowercase<HTTPMethod>\n service[lowerMethod] = appliedService(method)\n }\n return service\n}\nexport { enhancedFetch, makeFetcher, makeService, typedResponse }",
"score": 0.8260641694068909
},
{
"filename": "src/api.ts",
"retrieved_chunk": ") {\n const fetcher = makeFetcher(baseURL, baseHeaders)\n function appliedService(method: HTTPMethod) {\n return async <T extends string>(\n path: T,\n requestInit: ServiceRequestInit<T> = {},\n ) => fetcher(path, { ...requestInit, method })\n }\n let service = {} as Record<\n Lowercase<HTTPMethod>,",
"score": 0.8259459733963013
},
{
"filename": "src/primitives.test.ts",
"retrieved_chunk": " expect(subject.ensureStringBody(rs)).toBe(rs)\n const fd = new FormData()\n expect(subject.ensureStringBody(fd)).toBe(fd)\n const usp = new URLSearchParams()\n expect(subject.ensureStringBody(usp)).toBe(usp)\n const blob = new Blob()\n expect(subject.ensureStringBody(blob)).toBe(blob)\n })\n})\ndescribe('makeGetApiURL', () => {",
"score": 0.8255554437637329
},
{
"filename": "src/primitives.test.ts",
"retrieved_chunk": " ).toBe('https://example.com/api/users')\n expect(\n subject.makeGetApiURL(new URL('https://example.com/api/'))('///users'),\n ).toBe('https://example.com/api/users')\n })\n it('should add missing slashes', () => {\n expect(\n subject.makeGetApiURL(new URL('https://example.com/api'))('users'),\n ).toBe('https://example.com/api/users')\n })",
"score": 0.8222729563713074
}
] |
typescript
|
as Lowercase<HTTPMethod>],
).toBe('function')
}
|
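The `type _R = Expect<Equal<...>>` assertions in the tests above compile only when the two types match exactly. Their definitions are not part of this excerpt; a minimal sketch, assuming the common type-challenges formulation:

type Equal<X, Y> = (<T>() => T extends X ? 1 : 2) extends (<T>() => T extends Y ? 1 : 2)
  ? true
  : false;
type Expect<T extends true> = T;

// Compile-time only: this line type-checks because the two types are identical.
type _Demo = Expect<Equal<{ foo: string }, { foo: string }>>;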
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
// The stdin and stdout streams from Node.js, used to read user input and write console output.
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context =
|
await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
|
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
|
src/index.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/commands/setContextConfigCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { setNumContextDocumentsToRetrieve, getConfig } from '../config/index.js';\nconst setContextConfigCommand = createCommand(\n 'context-config',\n ['cc'],\n `Sets the number of relevant documents to return from the context vector store.\\n\n Arguments: \\`number of documents\\` (Default: 6)\\n\n Example: \\`/context-config 10\\``,\n async (args, output) => {",
"score": 0.8258107900619507
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " let vectorStore: HNSWLib;\n let spinner;\n const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);\n await createDirectory(newContextVectorStorePath);\n setCurrentVectorStoreDatabasePath(newContextVectorStorePath);\n const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\\n`));\n } catch {",
"score": 0.8227142691612244
},
{
"filename": "src/commands/setContextConfigCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /context-config `number of documents`\\n'));\n return;\n }\n const numContextDocumentsToRetrieve = parseInt(args[0], 10);\n setNumContextDocumentsToRetrieve(numContextDocumentsToRetrieve);\n const config = getConfig();\n output.write(chalk.blue(`Number of context documents to retrieve set to ${config.numContextDocumentsToRetrieve}`));\n }\n);",
"score": 0.8210636377334595
},
{
"filename": "src/commands/setMemoryConfigCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { setNumMemoryDocumentsToRetrieve, getConfig } from '../config/index.js';\nconst setMemoryConfigCommand = createCommand(\n 'memory-config',\n ['mc'],\n `Sets the number of relevant documents to return from the memory vector store.\\n\n Arguments: \\`number of documents\\` (Default: 4)\\n\n Example: /memory-config 10`,\n async (args, output) => {",
"score": 0.8201382160186768
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n } catch {\n spinner = ora({\n ...defaultOraOptions,\n text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),\n }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const filesToAdd = await getDirectoryFiles(docsDirectory);",
"score": 0.8165605068206787
}
] |
typescript
|
await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
|
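sanitizeInput is imported above but its body is not shown; one plausible minimal version, purely a hypothetical sketch (the repo's implementation may differ):

// Hypothetical sketch -- not the repo's actual sanitizeInput.
function sanitizeInputSketch(input: string): string {
  // Trim surrounding whitespace and strip ASCII control characters.
  return input.trim().replace(/[\u0000-\u001f\u007f]/g, '');
}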
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
// The stdin and stdout streams from Node.js, used to read user input and write console output.
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await
|
logChat(chatLogDirectory, question, response.response);
|
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
|
src/index.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": "const getLogFilename = (): string => {\n const currentDate = new Date();\n const year = currentDate.getFullYear();\n const month = String(currentDate.getMonth() + 1).padStart(2, '0');\n const day = String(currentDate.getDate()).padStart(2, '0');\n return `${year}-${month}-${day}.json`;\n};\nconst logChat = async (logDirectory: string, question: string, answer: string): Promise<void> => {\n const timestamp = new Date().toISOString();\n const chatHistory: ChatHistory = { timestamp, question, answer };",
"score": 0.8577073812484741
},
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": " const logFilename = getLogFilename();\n const logFilePath = path.join(logDirectory, logFilename);\n ensureLogDirectory(logDirectory);\n if (!fs.existsSync(logFilePath)) {\n await fs.writeJson(logFilePath, [chatHistory]);\n } else {\n const chatHistoryArray = await fs.readJson(logFilePath);\n chatHistoryArray.push(chatHistory);\n await fs.writeJson(logFilePath, chatHistoryArray);\n }",
"score": 0.8483507633209229
},
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": "import fs from 'fs-extra';\nimport path from 'path';\ninterface ChatHistory {\n timestamp: string;\n question: string;\n answer: string;\n}\nconst ensureLogDirectory = (logDirectory: string): void => {\n fs.ensureDirSync(logDirectory);\n};",
"score": 0.8216291069984436
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "async function getMemoryVectorStore() {\n return memoryWrapper.vectorStoreInstance;\n}\nfunction getBufferWindowMemory() {\n return bufferWindowMemory;\n}\nasync function saveMemoryVectorStore() {\n await memoryWrapper.vectorStoreInstance.save(memoryDirectory);\n}\nasync function addDocumentsToMemoryVectorStore(",
"score": 0.8088667392730713
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 0.8085203170776367
}
] |
typescript
|
logChat(chatLogDirectory, question, response.response);
|
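Per the chatLogger chunk above, each logChat call appends one record to chat_logs/YYYY-MM-DD.json. The record shape, taken from that chunk:

interface ChatHistory {
  timestamp: string; // ISO-8601, produced via new Date().toISOString()
  question: string;
  answer: string;
}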
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
// The stdin and stdout streams from Node.js, used to read user input and write console output.
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
|
const windowMemory = getBufferWindowMemory();
|
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
|
src/index.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nconst bufferWindowMemory = new BufferWindowMemory({\n returnMessages: false,\n memoryKey: 'immediate_history',\n inputKey: 'input',\n k: 2,\n});\nconst memoryWrapper = {\n vectorStoreInstance: memoryVectorStore,\n};",
"score": 0.8412333130836487
},
{
"filename": "src/commands/toggleWindowBufferMemoryCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { setUseWindowMemory, getConfig } from '../config/index.js';\nconst toggleWindowBufferMemoryCommand = createCommand(\n 'toggle-window-memory',\n ['wm'],\n `Toggles the window buffer memory (MemoryBot's short-term transient memory) on or off.`,\n async (_args, output) => {\n setUseWindowMemory(!getConfig().useWindowMemory);\n const config = getConfig();",
"score": 0.8240911364555359
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport { HNSWLib } from 'langchain/vectorstores/hnswlib';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { stdout as output } from 'node:process';\nimport { OpenAIEmbeddings } from 'langchain/embeddings/openai';\nimport { Document } from 'langchain/document';\nimport { BufferWindowMemory } from 'langchain/memory';\nimport { getProjectRoot } from '../config/index.js';\nconst projectRootDir = getProjectRoot();",
"score": 0.8178315162658691
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 0.8173755407333374
},
{
"filename": "src/commands/toggleWindowBufferMemoryCommand.ts",
"retrieved_chunk": " output.write(chalk.blue(`Use Window Buffer Memory set to ${config.useWindowMemory}`));\n }\n);\nexport default toggleWindowBufferMemoryCommand;",
"score": 0.8114190101623535
}
] |
typescript
|
const windowMemory = getBufferWindowMemory();
|
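The memoryManager chunk above shows how the window memory returned by getBufferWindowMemory is configured; reproduced here as a standalone sketch:

import { BufferWindowMemory } from 'langchain/memory';

// Keeps only the last k=2 exchanges, exposed to the prompt as 'immediate_history'.
const windowMemorySketch = new BufferWindowMemory({
  returnMessages: false,
  memoryKey: 'immediate_history',
  inputKey: 'input',
  k: 2,
});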
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
? () => ISqlTypeWithNoParams
: T extends ISqlTypeFactoryWithLength
? (length?: number) => ISqlTypeWithLength
: T extends ISqlTypeFactoryWithScale
? (scale?: number) => ISqlTypeWithScale
: T extends ISqlTypeFactoryWithPrecisionScale
? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
: T extends ISqlTypeFactoryWithTvpType
? (tvpType?: unknown) => ISqlTypeWithTvpType
: never;
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
type IndexableTypes = {
[K in TypesKey]: TypeFactory<TypesType[K]>;
};
function isSqlTypeFactoryWithNoParams(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
return (
factoryObject !== undefined &&
!('length' in factoryObject) &&
!('scale' in factoryObject) &&
!('precision' in factoryObject) &&
!('tvpType' in factoryObject)
);
}
function isSqlTypeFactoryWithLength(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
return factoryObject !== undefined && 'length' in factoryObject;
}
function isSqlTypeFactoryWithScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
return factoryObject !== undefined && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithPrecisionScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
return factoryObject !== undefined && 'precision' in factoryObject && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithTvpType(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
return factoryObject !== undefined && 'tvpType' in factoryObject;
}
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
const lowercasePropertyName = propertyName.toLowerCase();
for (const key in obj) {
if (
Object.prototype.hasOwnProperty.call(obj, key) &&
key.toLowerCase() === lowercasePropertyName
) {
return key;
}
}
return null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {
const types: IndexableTypes = TYPES;
const property = findPropertyCaseInsensitive(types, type);
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return (
|
typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
|
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
type SqlValue = string | number | boolean | Date | Buffer;
const isStringOrNumber = (value: SqlValue): value is string | number => {
return typeof value === 'string' || typeof value === 'number';
};
const isDate = (value: SqlValue): value is Date => {
return value instanceof Date;
};
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
if (value === 'NULL') {
return null;
}
const lowerCaseSqlType = sqlType.toLowerCase();
if (
isType(lowerCaseSqlType, [
'varchar',
'nvarchar',
'char',
'nchar',
'text',
'ntext',
'xml',
'uniqueidentifier',
])
) {
return String(value);
}
if (
isType(lowerCaseSqlType, [
'int',
'smallint',
'tinyint',
'bigint',
'decimal',
'numeric',
'float',
'real',
'money',
'smallmoney',
])
) {
return Number(value);
}
if (isType(lowerCaseSqlType, ['bit'])) {
return Boolean(value);
}
if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
if (isStringOrNumber(value) || isDate(value)) {
return new Date(value);
}
throw new Error('Cannot create a Date from a boolean value.');
}
if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
return value;
}
if (isType(lowerCaseSqlType, ['tvp'])) {
throw new Error('TVPs are not supported.');
}
if (isType(lowerCaseSqlType, ['udt'])) {
throw new Error('UDTs are not supported.');
}
throw new Error(`Unsupported SQL type: ${sqlType}`);
};
|
src/lib/utils/type-map.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 0.825131893157959
},
{
"filename": "src/lib/types/driver-type.ts",
"retrieved_chunk": "import type { ISqlType } from 'mssql';\n/**\n * Driver types that can be used to specify the type of a stored procedure parameter.\n */\nexport type DriverType = (() => ISqlType) | ISqlType;",
"score": 0.8224927186965942
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": "import { camelCase } from 'lodash';\nimport { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';\nimport { type GraphQLResolveInfo } from 'graphql';\nimport {\n type DriverType,\n type PreparedStoredProcedureParameter,\n ParameterMode,\n type StoredProcedureSchema,\n type StoredProcedureParameter,\n type ILogger,",
"score": 0.8060399293899536
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 0.8058534264564514
},
{
"filename": "src/lib/types/prepared-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { StoredProcedureParameter, DriverType } from '.';\n/**\n * Final parameters that will be passed to the stored procedure request.\n */\nexport type PreparedStoredProcedureParameter = Omit<StoredProcedureParameter, 'type'> & {\n type: DriverType;\n value?: unknown;\n};",
"score": 0.8034993410110474
}
] |
typescript
|
typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
|
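A minimal usage sketch of the mapping above. The inputs are illustrative; in the source they come from INFORMATION_SCHEMA.PARAMETERS rows, and mapDbTypeToDriverType is assumed to be imported from the module shown:

// varchar(max) is reported with CHARACTER_MAXIMUM_LENGTH = -1, which the
// mapping converts to the driver's MAX sentinel.
const varcharMax = mapDbTypeToDriverType({ type: 'varchar', length: -1 });
// A type name with no matching TYPES factory falls through to NVarChar().
const unmapped = mapDbTypeToDriverType({ type: 'not_a_real_type' }); // hypothetical unmapped name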
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
? () => ISqlTypeWithNoParams
: T extends ISqlTypeFactoryWithLength
? (length?: number) => ISqlTypeWithLength
: T extends ISqlTypeFactoryWithScale
? (scale?: number) => ISqlTypeWithScale
: T extends ISqlTypeFactoryWithPrecisionScale
? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
: T extends ISqlTypeFactoryWithTvpType
? (tvpType?: unknown) => ISqlTypeWithTvpType
: never;
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
type IndexableTypes = {
[K in TypesKey]: TypeFactory<TypesType[K]>;
};
function isSqlTypeFactoryWithNoParams(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
return (
factoryObject !== undefined &&
!('length' in factoryObject) &&
!('scale' in factoryObject) &&
!('precision' in factoryObject) &&
!('tvpType' in factoryObject)
);
}
function isSqlTypeFactoryWithLength(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
return factoryObject !== undefined && 'length' in factoryObject;
}
function isSqlTypeFactoryWithScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
return factoryObject !== undefined && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithPrecisionScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
return factoryObject !== undefined && 'precision' in factoryObject && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithTvpType(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
return factoryObject !== undefined && 'tvpType' in factoryObject;
}
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
const lowercasePropertyName = propertyName.toLowerCase();
for (const key in obj) {
if (
Object.prototype.hasOwnProperty.call(obj, key) &&
key.toLowerCase() === lowercasePropertyName
) {
return key;
}
}
return null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
|
}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {
|
const types: IndexableTypes = TYPES;
const property = findPropertyCaseInsensitive(types, type);
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
type SqlValue = string | number | boolean | Date | Buffer;
const isStringOrNumber = (value: SqlValue): value is string | number => {
return typeof value === 'string' || typeof value === 'number';
};
const isDate = (value: SqlValue): value is Date => {
return value instanceof Date;
};
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
if (value === 'NULL') {
return null;
}
const lowerCaseSqlType = sqlType.toLowerCase();
if (
isType(lowerCaseSqlType, [
'varchar',
'nvarchar',
'char',
'nchar',
'text',
'ntext',
'xml',
'uniqueidentifier',
])
) {
return String(value);
}
if (
isType(lowerCaseSqlType, [
'int',
'smallint',
'tinyint',
'bigint',
'decimal',
'numeric',
'float',
'real',
'money',
'smallmoney',
])
) {
return Number(value);
}
if (isType(lowerCaseSqlType, ['bit'])) {
return Boolean(value);
}
if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
if (isStringOrNumber(value) || isDate(value)) {
return new Date(value);
}
throw new Error('Cannot create a Date from a boolean value.');
}
if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
return value;
}
if (isType(lowerCaseSqlType, ['tvp'])) {
throw new Error('TVPs are not supported.');
}
if (isType(lowerCaseSqlType, ['udt'])) {
throw new Error('UDTs are not supported.');
}
throw new Error(`Unsupported SQL type: ${sqlType}`);
};
|
src/lib/utils/type-map.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/types/driver-type.ts",
"retrieved_chunk": "import type { ISqlType } from 'mssql';\n/**\n * Driver types that can be used to specify the type of a stored procedure parameter.\n */\nexport type DriverType = (() => ISqlType) | ISqlType;",
"score": 0.8328269720077515
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.8325679898262024
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " type InputParameters,\n} from '../types';\nimport { mapDbTypeToDriverType, replacer } from '../utils';\nimport { logExecutionBegin, logPerformance, logSafely } from '../logging';\nimport {\n type StoredProcedureCacheManager,\n type StoredProcedureMetadataManager,\n} from '../stored-procedure';\nimport { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';\nimport { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';",
"score": 0.8183020353317261
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": "import { camelCase } from 'lodash';\nimport { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';\nimport { type GraphQLResolveInfo } from 'graphql';\nimport {\n type DriverType,\n type PreparedStoredProcedureParameter,\n ParameterMode,\n type StoredProcedureSchema,\n type StoredProcedureParameter,\n type ILogger,",
"score": 0.8122572302818298
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " const parameterSchemaMap: Map<string, StoredProcedureParameter> =\n schemaResult.recordsets[0].reduce(\n (parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {\n parameterMap.set(item.name, item);\n return parameterMap;\n },\n new Map<string, StoredProcedureParameter>(),\n );\n const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;\n if (storedProcedureDefinition == null) {",
"score": 0.8022817373275757
}
] |
typescript
|
}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {
|
import { RequestInfo, RequestInit, Response } from 'node-fetch';
import { cloneResponse } from './cloneResponse';
import { timeSpan } from './timeSpan';
import { apiDebug } from './apiDebug';
import { apiReport } from './apiReport';
import { getRequestMock, saveRequestMock } from './apiCache';
// node-fetch is loaded lazily via dynamic import (v3 is ESM-only)
const fetch = (url: URL | RequestInfo, init?: RequestInit) =>
  import('node-fetch').then(({ default: fetch }) => fetch(url, init));
type RequestOptions = RequestInit & {
shouldReport?: boolean;
};
export const apiWithLog = async (
init: RequestInfo,
optionsApi: RequestOptions = { method: 'GET' },
): Promise<Response> => {
const end = timeSpan();
const options = {
...optionsApi,
headers: {
...(optionsApi.headers || {}),
'user-agent': 'node-fetch',
},
};
const requestMock = await getRequestMock(init, options);
if (requestMock) {
return requestMock;
}
return fetch(init, options).then(async (response) => {
const durationTime = end();
const text = await response.text();
let json: any = null;
    // attempt to parse the response body as JSON; fall back to raw text when it is not
    try {
json = JSON.parse(text);
} catch (err) {
// eslint-disable-next-line
}
const getBody = (): Record<string, string> => {
if (json) {
return {
json,
};
}
return {
text,
};
};
await saveRequestMock(init, options, text, response);
apiDebug({
init,
options,
durationTime,
getBody,
response,
});
|
await apiReport({
|
init,
options,
getBody,
response,
json,
text,
});
const { responseCopy } = await cloneResponse(response, text);
return responseCopy;
});
};
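// Hedged usage sketch (URL and options are assumptions, not from the source):
// const res = await apiWithLog('https://example.com/api', {
//   method: 'POST',
//   shouldReport: false, // opt out of apiReport for this request
// });
// const data = await res.json();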
|
src/apiWithLog.ts
|
entria-apiWithLog-be8c368
|
[
{
"filename": "src/apiCache.ts",
"retrieved_chunk": "export const saveRequestMock = async (\n init: RequestInfo,\n options: RequestInit,\n text: string,\n response: Response,\n) => {\n if (process.env.WRITE_MOCK !== 'true') {\n return;\n }\n // only save ok requests 200",
"score": 0.8925976753234863
},
{
"filename": "src/apiCache.ts",
"retrieved_chunk": " const { text, response } = mock;\n const { responseCopy } = await cloneResponse(response, text);\n // eslint-disable-next-line\n console.log('mock-cache: ', requestKey);\n if (process.env.DEBUG === 'true') {\n // eslint-disable-next-line\n const { agent, ...optionsWithoutAgent } = options;\n const curl = getCurl(init, options);\n const getBody = () => {\n let json = null;",
"score": 0.8822197914123535
},
{
"filename": "src/apiReport.ts",
"retrieved_chunk": "export const apiReport = async ({\n init,\n options,\n getBody,\n response,\n json,\n text,\n}: ApiReport) => {\n const canReport =\n typeof options?.shouldReport === 'boolean' ? options.shouldReport : true;",
"score": 0.8724936842918396
},
{
"filename": "src/apiCache.ts",
"retrieved_chunk": " };\n };\n // eslint-disable-next-line\n debugConsole({\n init,\n options: optionsWithoutAgent,\n ...getBody(),\n ok: response.ok,\n status: response.status,\n curl,",
"score": 0.8583847284317017
},
{
"filename": "src/apiDebug.ts",
"retrieved_chunk": " // eslint-disable-next-line\n debugConsole({\n time: `${durationTime}ms`,\n init,\n options: cleanOptions,\n // text,\n // json,\n ...getBody(),\n ok: response.ok,\n status: response.status,",
"score": 0.8554368019104004
}
] |
typescript
|
await apiReport({
|
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
? () => ISqlTypeWithNoParams
: T extends ISqlTypeFactoryWithLength
? (length?: number) => ISqlTypeWithLength
: T extends ISqlTypeFactoryWithScale
? (scale?: number) => ISqlTypeWithScale
: T extends ISqlTypeFactoryWithPrecisionScale
? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
: T extends ISqlTypeFactoryWithTvpType
? (tvpType?: unknown) => ISqlTypeWithTvpType
: never;
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
type IndexableTypes = {
[K in TypesKey]: TypeFactory<TypesType[K]>;
};
function isSqlTypeFactoryWithNoParams(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
return (
factoryObject !== undefined &&
!('length' in factoryObject) &&
!('scale' in factoryObject) &&
!('precision' in factoryObject) &&
!('tvpType' in factoryObject)
);
}
function isSqlTypeFactoryWithLength(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
return factoryObject !== undefined && 'length' in factoryObject;
}
function isSqlTypeFactoryWithScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
return factoryObject !== undefined && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithPrecisionScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
return factoryObject !== undefined && 'precision' in factoryObject && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithTvpType(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
return factoryObject !== undefined && 'tvpType' in factoryObject;
}
// Returns the key of `obj` matching `propertyName` case-insensitively, or null if none matches.
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
const lowercasePropertyName = propertyName.toLowerCase();
for (const key in obj) {
if (
Object.prototype.hasOwnProperty.call(obj, key) &&
key.toLowerCase() === lowercasePropertyName
) {
return key;
}
}
return null;
};
// Maps a stored procedure parameter's SQL type metadata to an mssql driver type.
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {
const types: IndexableTypes = TYPES;
|
const property = findPropertyCaseInsensitive(types, type);
|
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
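// Hedged usage sketch (parameter values are assumptions): a length of -1 is
// translated to MAX for length-parameterized types such as nvarchar.
// const driverType = mapDbTypeToDriverType({
//   type: 'nvarchar', length: -1, precision: 0, scale: 0,
// });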
type SqlValue = string | number | boolean | Date | Buffer;
const isStringOrNumber = (value: SqlValue): value is string | number => {
return typeof value === 'string' || typeof value === 'number';
};
const isDate = (value: SqlValue): value is Date => {
return value instanceof Date;
};
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};
// Converts a raw SQL value into the corresponding JavaScript value for the given SQL type.
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
if (value === 'NULL') {
return null;
}
const lowerCaseSqlType = sqlType.toLowerCase();
if (
isType(lowerCaseSqlType, [
'varchar',
'nvarchar',
'char',
'nchar',
'text',
'ntext',
'xml',
'uniqueidentifier',
])
) {
return String(value);
}
if (
isType(lowerCaseSqlType, [
'int',
'smallint',
'tinyint',
'bigint',
'decimal',
'numeric',
'float',
'real',
'money',
'smallmoney',
])
) {
return Number(value);
}
if (isType(lowerCaseSqlType, ['bit'])) {
return Boolean(value);
}
if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
if (isStringOrNumber(value) || isDate(value)) {
return new Date(value);
}
throw new Error('Cannot create a Date from a boolean value.');
}
if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
return value;
}
if (isType(lowerCaseSqlType, ['tvp'])) {
throw new Error('TVPs are not supported.');
}
if (isType(lowerCaseSqlType, ['udt'])) {
throw new Error('UDTs are not supported.');
}
throw new Error(`Unsupported SQL type: ${sqlType}`);
};
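// Hedged usage sketch (inputs are assumptions): the 'NULL' sentinel maps to
// null, and numeric SQL types are converted with Number().
// convertSqlValueToJsValue('NULL', 'int');   // -> null
// convertSqlValueToJsValue('42', 'decimal'); // -> 42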
|
src/lib/utils/type-map.ts
|
Falven-mssql-data-source-bca6621
|
[
{
"filename": "src/lib/types/driver-type.ts",
"retrieved_chunk": "import type { ISqlType } from 'mssql';\n/**\n * Driver types that can be used to specify the type of a stored procedure parameter.\n */\nexport type DriverType = (() => ISqlType) | ISqlType;",
"score": 0.8306499719619751
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 0.8281210064888
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " type InputParameters,\n} from '../types';\nimport { mapDbTypeToDriverType, replacer } from '../utils';\nimport { logExecutionBegin, logPerformance, logSafely } from '../logging';\nimport {\n type StoredProcedureCacheManager,\n type StoredProcedureMetadataManager,\n} from '../stored-procedure';\nimport { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';\nimport { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';",
"score": 0.826791524887085
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": "import { camelCase } from 'lodash';\nimport { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';\nimport { type GraphQLResolveInfo } from 'graphql';\nimport {\n type DriverType,\n type PreparedStoredProcedureParameter,\n ParameterMode,\n type StoredProcedureSchema,\n type StoredProcedureParameter,\n type ILogger,",
"score": 0.8156561851501465
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " const parameterSchemaMap: Map<string, StoredProcedureParameter> =\n schemaResult.recordsets[0].reduce(\n (parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {\n parameterMap.set(item.name, item);\n return parameterMap;\n },\n new Map<string, StoredProcedureParameter>(),\n );\n const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;\n if (storedProcedureDefinition == null) {",
"score": 0.8087671399116516
}
] |
typescript
|
const property = findPropertyCaseInsensitive(types, type);
|
import { RequestInfo, RequestInit, Response } from 'node-fetch';
import { cloneResponse } from './cloneResponse';
import { timeSpan } from './timeSpan';
import { apiDebug } from './apiDebug';
import { apiReport } from './apiReport';
import { getRequestMock, saveRequestMock } from './apiCache';
// node-fetch is loaded lazily via dynamic import (v3 is ESM-only)
const fetch = (url: URL | RequestInfo, init?: RequestInit) =>
  import('node-fetch').then(({ default: fetch }) => fetch(url, init));
type RequestOptions = RequestInit & {
shouldReport?: boolean;
};
export const apiWithLog = async (
init: RequestInfo,
optionsApi: RequestOptions = { method: 'GET' },
): Promise<Response> => {
const end = timeSpan();
const options = {
...optionsApi,
headers: {
...(optionsApi.headers || {}),
'user-agent': 'node-fetch',
},
};
const requestMock = await getRequestMock(init, options);
if (requestMock) {
return requestMock;
}
return fetch(init, options).then(async (response) => {
const durationTime = end();
const text = await response.text();
let json: any = null;
    // attempt to parse the response body as JSON; fall back to raw text when it is not
    try {
json = JSON.parse(text);
} catch (err) {
// eslint-disable-next-line
}
const getBody = (): Record<string, string> => {
if (json) {
return {
json,
};
}
return {
text,
};
};
|
await saveRequestMock(init, options, text, response);
|
apiDebug({
init,
options,
durationTime,
getBody,
response,
});
await apiReport({
init,
options,
getBody,
response,
json,
text,
});
const { responseCopy } = await cloneResponse(response, text);
return responseCopy;
});
};
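// Hedged usage sketch (env handling grounded in apiCache.ts): with
// WRITE_MOCK=true a successful response is persisted via saveRequestMock and
// replayed by getRequestMock on the next identical call.
// process.env.WRITE_MOCK = 'true';
// await apiWithLog('https://example.com/api'); // live call, mock written
// await apiWithLog('https://example.com/api'); // served from the mock cache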
|
src/apiWithLog.ts
|
entria-apiWithLog-be8c368
|
[
{
"filename": "src/apiCache.ts",
"retrieved_chunk": " const { text, response } = mock;\n const { responseCopy } = await cloneResponse(response, text);\n // eslint-disable-next-line\n console.log('mock-cache: ', requestKey);\n if (process.env.DEBUG === 'true') {\n // eslint-disable-next-line\n const { agent, ...optionsWithoutAgent } = options;\n const curl = getCurl(init, options);\n const getBody = () => {\n let json = null;",
"score": 0.8839869499206543
},
{
"filename": "src/apiCache.ts",
"retrieved_chunk": "export const saveRequestMock = async (\n init: RequestInfo,\n options: RequestInit,\n text: string,\n response: Response,\n) => {\n if (process.env.WRITE_MOCK !== 'true') {\n return;\n }\n // only save ok requests 200",
"score": 0.8808432221412659
},
{
"filename": "src/apiCache.ts",
"retrieved_chunk": " try {\n json = JSON.parse(text);\n } catch (err) {}\n if (json) {\n return {\n json,\n };\n }\n return {\n text,",
"score": 0.8648794293403625
},
{
"filename": "src/cloneResponse.ts",
"retrieved_chunk": " ok: response?.ok,\n size: response?.size,\n url: response?.url,\n });\n return {\n responseCopy,\n text,\n json,\n };\n};",
"score": 0.8457508683204651
},
{
"filename": "src/apiReport.ts",
"retrieved_chunk": "export const apiReport = async ({\n init,\n options,\n getBody,\n response,\n json,\n text,\n}: ApiReport) => {\n const canReport =\n typeof options?.shouldReport === 'boolean' ? options.shouldReport : true;",
"score": 0.8413585424423218
}
] |
typescript
|
await saveRequestMock(init, options, text, response);
|
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
|
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
|
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
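// Hedged usage sketch (API shape inferred from these tests): analyzing a
// source string directly and reading the collected type ranges.
// const analyzer = new TypeAnalyzer('const a = 1 as number;');
// const types = analyzer.analyze(); // -> [{ kind: TYPE_KIND.AS_ASSERTION, ... }]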
|
src/core/helpers/type-analyzer/index.test.ts
|
xlboy-ts-type-hidden-a749a29
|
[
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.8420060276985168
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.8375416994094849
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 0.8368245959281921
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " analyze() {\n this.visit(this.sourceFile, null);\n this.cleanAnalyzedTypes();\n return this.analyzedTypes;\n }\n private cleanAnalyzedTypes() {\n clearUselessTypes.call(this);\n clearLineBreakOfStartOrEnd.call(this);\n return;\n function clearLineBreakOfStartOrEnd(this: TypeAnalyzer) {",
"score": 0.8229129314422607
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n startPos,\n parent.end\n ]);\n } else {\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_OVERLOAD, [\n parent.pos,\n parent.end\n ]);",
"score": 0.8148583173751831
}
] |
typescript
|
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
|
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind:
|
TYPE_KIND.TYPE_ALIAS
},
{
|
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
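// Hedged sketch (inferred from the tsx tests above): the second constructor
// argument switches the parser into TSX mode.
// const tsxAnalyzer = new TypeAnalyzer('const a = <C<number> />;', true);
// tsxAnalyzer.analyze(); // yields TYPE_KIND.TSX_COMPONENT_GENERIC entries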
|
src/core/helpers/type-analyzer/index.test.ts
|
xlboy-ts-type-hidden-a749a29
|
[
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 0.8574470281600952
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.8556220531463623
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": "import { isEqual } from 'lodash-es';\nimport ts from 'typescript';\nimport { TYPE_KIND } from './constants';\nexport interface AnalyzedType {\n kind: TYPE_KIND;\n range: ts.TextRange;\n text: string;\n}\nexport class TypeAnalyzer {\n public sourceFile: ts.SourceFile;",
"score": 0.8364266157150269
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " // [1]. `declare const a: number`, [2]. `: number`. remove [2]\n function clearUselessTypes(this: TypeAnalyzer) {\n const indexsToRemove = new Set<number>();\n this.analyzedTypes.forEach((type, index) => {\n if (indexsToRemove.has(index)) return;\n this.analyzedTypes.forEach((_type, _index) => {\n if (index === _index || indexsToRemove.has(_index)) return;\n if (isEqual(_type, type)) return indexsToRemove.add(index);\n if (type.range.pos >= _type.range.pos) {\n if (type.range.end < _type.range.end) indexsToRemove.add(index);",
"score": 0.8279117345809937
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.FUNCTION_CALL_GENERIC, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n }\n // context: `<number>a`, get `<number>`\n function handleParentTypeAssertionExpr(\n this: TypeAnalyzer,\n parent: ts.TypeAssertion,",
"score": 0.8254261016845703
}
] |
typescript
|
TYPE_KIND.TYPE_ALIAS
},
{
|
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
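// TypeAnalyzer test suite. Each case feeds a raw source string to the analyzer
// and asserts the collected entries: `range` holds character offsets into that
// string, `text` is the exact slice covered by the range, and `kind` is the
// TYPE_KIND category assigned to it.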
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
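// Generic parameter lists are reported with their enclosing angle brackets,
// regardless of declaration form (function declaration, arrow, function
// expression, or object method).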
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
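// Parameter annotations are reported from the colon onward; optional markers
// keep their leading `?` in the reported text.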
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
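// Note: the template below deliberately starts with a stray `n`, so the first
// statement begins at offset 2 and the expected `pos` values line up.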
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
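// Assertion-style return types (`asserts x is T`) are categorized as
// FUNCTION_TYPE_PREDICATE, separately from plain return annotations.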
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
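// Interface declarations are captured whole, body included, but without the
// trailing semicolon.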
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
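// A `declare` variable statement is reported once as DECLARE_STATEMENT;
// non-declared variables get one VARIABLE_TYPE_DEFINITION entry per
// annotation, and `!` definite-assignment markers stay attached to the text.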
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
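// Every `declare` form (const, function, class, module, namespace, enum,
// global, ambient module) collapses to a single DECLARE_STATEMENT entry.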
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
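// Chained casts yield one AS_ASSERTION entry per `as`, with back-to-back
// ranges (93 -> 100 -> 105 below).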
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
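// When `satisfies` and `as` are combined, each operator is reported as its
// own adjacent entry in source order.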
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
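// Angle-bracket assertions (`<T>value`) are distinguished from `as` casts via
// ANGLE_BRACKETS_ASSERTION.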
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
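// Type arguments at call and `new` sites are reported as
// FUNCTION_CALL_GENERIC, including nested generics such as
// Set<PersistListener<S>>.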
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
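// Class members: property annotations keep their `!`/`?` modifiers, and
// overload signatures without bodies are reported whole as FUNCTION_OVERLOAD.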
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
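// The second constructor argument (`true`) appears to switch the analyzer
// into TSX parsing mode for the JSX-flavoured cases below.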
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind:
|
TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
|
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
|
src/core/helpers/type-analyzer/index.test.ts
|
xlboy-ts-type-hidden-a749a29
|
[
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 0.8194042444229126
},
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": " * declare module d {}\n * declare namespace e {}\n * declare enum f {}\n * declare global {}\n * declare module 'g' {}\n * ```\n * ⏭️ 👆 All statements that begin with `declare`\n */\n DECLARE_STATEMENT = 'declare-statement'\n}",
"score": 0.818540632724762
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " );\n // :\n const prevNode = children[index - 1];\n // !\n const operatorNode = children[index - 2];\n const hasOperatorNode = operatorNode.kind === ts.SyntaxKind.ExclamationToken;\n this.pushAnalyzedType(TYPE_KIND.VARIABLE_TYPE_DEFINITION, [\n hasOperatorNode ? operatorNode.end - 1 : prevNode.end - 1,\n curChild.end\n ]);",
"score": 0.811137318611145
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n // function a<B extends 222>(test: ...): void;\n const isOverload = parent.body === undefined;\n if (isOverload) {\n // public a<B extends 222>(test: ...): void;\n if (ts.isMethodDeclaration(parent)) {\n let startPos = parent.name.end;\n if (parent.modifiers && parent.modifiers.length > 0) {\n startPos = parent.modifiers[0].pos;",
"score": 0.8098235130310059
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.8095797896385193
}
] |
typescript
|
TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
|
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind:
|
TYPE_KIND.DECLARE_STATEMENT
},
{
|
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
|
src/core/helpers/type-analyzer/index.test.ts
|
xlboy-ts-type-hidden-a749a29
|
[
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 0.8572421073913574
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.854698657989502
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": "import { isEqual } from 'lodash-es';\nimport ts from 'typescript';\nimport { TYPE_KIND } from './constants';\nexport interface AnalyzedType {\n kind: TYPE_KIND;\n range: ts.TextRange;\n text: string;\n}\nexport class TypeAnalyzer {\n public sourceFile: ts.SourceFile;",
"score": 0.8424421548843384
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " analyze() {\n this.visit(this.sourceFile, null);\n this.cleanAnalyzedTypes();\n return this.analyzedTypes;\n }\n private cleanAnalyzedTypes() {\n clearUselessTypes.call(this);\n clearLineBreakOfStartOrEnd.call(this);\n return;\n function clearLineBreakOfStartOrEnd(this: TypeAnalyzer) {",
"score": 0.8386907577514648
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " // [1]. `declare const a: number`, [2]. `: number`. remove [2]\n function clearUselessTypes(this: TypeAnalyzer) {\n const indexsToRemove = new Set<number>();\n this.analyzedTypes.forEach((type, index) => {\n if (indexsToRemove.has(index)) return;\n this.analyzedTypes.forEach((_type, _index) => {\n if (index === _index || indexsToRemove.has(_index)) return;\n if (isEqual(_type, type)) return indexsToRemove.add(index);\n if (type.range.pos >= _type.range.pos) {\n if (type.range.end < _type.range.end) indexsToRemove.add(index);",
"score": 0.8348729014396667
}
] |
typescript
|
TYPE_KIND.DECLARE_STATEMENT
},
{
|
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { HTTP_METHODS } from './constants'
import * as subject from './api'
import * as z from 'zod'
import { HTTPMethod } from './types'
import { kebabToCamel } from './transforms'
const reqMock = vi.fn()
function successfulFetch(response: string | Record<string, unknown>) {
return async (input: URL | RequestInfo, init?: RequestInit | undefined) => {
reqMock({
url: input,
headers: init?.headers,
method: init?.method,
body: init?.body,
})
return new Response(
typeof response === 'string' ? response : JSON.stringify(response),
)
}
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('enhancedFetch', () => {
describe('json', () => {
it('should be untyped by default', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.json())
type _R = Expect<Equal<typeof result, unknown>>
expect(result).toEqual({ foo: 'bar' })
})
it('should accept a type', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.json<{ foo: string }>())
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
})
it('should accept a parser', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.json(z.object({ foo: z.string() })))
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
})
})
describe('text', () => {
it('should be untyped by default', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.text())
type _R = Expect<Equal<typeof result, string>>
expect(result).toEqual(`{"foo":"bar"}`)
})
it('should accept a type', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch('john@doe.com'),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.text<`${string}@${string}.${string}`>())
type _R = Expect<Equal<typeof result, `${string}@${string}.${string}`>>
expect(result).toEqual('john@doe.com')
})
it('should accept a parser', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch('john@doe.com'),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) => r.text(z.string().email()))
type _R = Expect<Equal<typeof result, string>>
expect(result).toEqual('john@doe.com')
})
})
it('should accept a schema that transforms the response', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: { 'deep-nested': { 'kind-of-value': true } } }),
)
const result = await subject
.enhancedFetch('https://example.com/api/users')
.then((r) =>
r.json(
z
.object({
foo: z.object({
'deep-nested': z.object({ 'kind-of-value': z.boolean() }),
}),
})
.transform(kebabToCamel),
),
)
type _R = Expect<
Equal<typeof result, { foo: { deepNested: { kindOfValue: boolean } } }>
>
expect(result).toEqual({ foo: { deepNested: { kindOfValue: true } } })
})
it('should replace params in the URL', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch(
'https://example.com/api/users/:user/page/:page',
{
params: {
user: '1',
page: '2',
// @ts-expect-error
foo: 'bar',
},
},
)
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users/1/page/2',
headers: new Headers({
'content-type': 'application/json',
}),
})
})
it('should accept a requestInit and a query', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch('https://example.com/api/users', {
headers: { Authorization: 'Bearer 123' },
query: { admin: 'true' },
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users?admin=true',
headers: new Headers({
authorization: 'Bearer 123',
'content-type': 'application/json',
}),
})
})
it('should accept a stringified body', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch('https://example.com/api/users', {
body: JSON.stringify({ id: 1, name: { first: 'John', last: 'Doe' } }),
method: 'POST',
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
body: `{"id":1,"name":{"first":"John","last":"Doe"}}`,
})
})
it('should stringify the body', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch('https://example.com/api/users', {
body: { id: 1, name: { first: 'John', last: 'Doe' } },
method: 'POST',
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
body: `{"id":1,"name":{"first":"John","last":"Doe"}}`,
})
})
it('should accept a trace function for debugging purposes', async () => {
const trace = vi.fn()
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
await subject.enhancedFetch('https://example.com/api/users', {
body: { id: 1, name: { first: 'John', last: 'Doe' } },
query: { admin: 'true' },
trace,
method: 'POST',
})
expect(trace).toHaveBeenCalledWith(
'https://example.com/api/users?admin=true',
{
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
body: `{"id":1,"name":{"first":"John","last":"Doe"}}`,
},
)
})
})
describe('makeFetcher', () => {
it('should return an applied enhancedFetch', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const service = subject.makeFetcher('https://example.com/api')
const result = await service('/users', { method: 'post' }).then((r) =>
r.json(z.object({ foo: z.string() })),
)
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'post',
})
})
it('should add headers to the request', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher('https://example.com/api', {
Authorization: 'Bearer 123',
})
await fetcher('/users')
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({
authorization: 'Bearer 123',
'content-type': 'application/json',
}),
})
})
it('should accept a typed params object', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher('https://example.com/api')
await fetcher('/users/:id', {
params: {
id: '1',
// @ts-expect-error
foo: 'bar',
},
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users/1',
headers: new Headers({ 'content-type': 'application/json' }),
})
})
it('should accept a function for dynamic headers', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher('https://example.com/api', () => ({
Authorization: 'Bearer 123',
}))
await fetcher('/users')
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({
authorization: 'Bearer 123',
'content-type': 'application/json',
}),
})
})
it('should accept an async function for dynamic headers', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher(
'https://example.com/api',
async () => ({
Authorization: 'Bearer 123',
}),
)
await fetcher('/users')
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({
authorization: 'Bearer 123',
'content-type': 'application/json',
}),
})
})
it('should accept a query, trace, and JSON-like body', async () => {
const trace = vi.fn()
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const fetcher = subject.makeFetcher('https://example.com/api')
await fetcher('/users', {
method: 'POST',
body: { id: 1, name: { first: 'John', last: 'Doe' } },
query: { admin: 'true' },
trace,
})
expect(trace).toHaveBeenCalledWith(
'https://example.com/api/users?admin=true',
{
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
body: `{"id":1,"name":{"first":"John","last":"Doe"}}`,
},
)
})
})
describe('makeService', () => {
it('should return an object with http methods', () => {
const service = subject.makeService('https://example.com/api')
|
for (const method of HTTP_METHODS) {
|
expect(
typeof service[method.toLocaleLowerCase() as Lowercase<HTTPMethod>],
).toBe('function')
}
})
it('should return an API with enhancedFetch', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const service = subject.makeService('https://example.com/api')
const result = await service
.post('/users')
.then((r) => r.json(z.object({ foo: z.string() })))
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'POST',
})
})
it('should accept a typed params object', async () => {
vi.spyOn(global, 'fetch').mockImplementationOnce(
successfulFetch({ foo: 'bar' }),
)
const service = subject.makeService('https://example.com/api')
await service.get('/users/:id', {
params: {
id: '1',
// @ts-expect-error
foo: 'bar',
},
})
expect(reqMock).toHaveBeenCalledWith({
url: 'https://example.com/api/users/1',
headers: new Headers({ 'content-type': 'application/json' }),
method: 'GET',
})
})
})
describe('typedResponse', () => {
it('should return unknown by default when turning into a JSON', async () => {
const result = await subject.typedResponse(new Response('1')).json()
type _R = Expect<Equal<typeof result, unknown>>
expect(result).toEqual(1)
})
it('should accept a type for the JSON method', async () => {
const result = await subject
.typedResponse(new Response(`{"foo":"bar"}`))
.json<{ foo: string }>()
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
})
it('should accept a parser for the JSON method', async () => {
const result = await subject
.typedResponse(new Response(`{"foo":"bar"}`))
.json(z.object({ foo: z.string() }))
type _R = Expect<Equal<typeof result, { foo: string }>>
expect(result).toEqual({ foo: 'bar' })
})
})
|
src/api.test.ts
|
gustavoguichard-make-service-e5a7bea
|
[
{
"filename": "src/primitives.test.ts",
"retrieved_chunk": " it('should return a URL which is baseURL and path joined', () => {\n expect(subject.makeGetApiURL('https://example.com/api')('/users')).toBe(\n 'https://example.com/api/users',\n )\n })\n it('should accept an object-like queryString and return it joined to the URL', () => {\n const getApiURL = subject.makeGetApiURL('https://example.com/api')\n expect(getApiURL('/users', { id: '1' })).toBe(\n 'https://example.com/api/users?id=1',\n )",
"score": 0.8331354856491089
},
{
"filename": "src/primitives.test.ts",
"retrieved_chunk": " expect(subject.ensureStringBody(rs)).toBe(rs)\n const fd = new FormData()\n expect(subject.ensureStringBody(fd)).toBe(fd)\n const usp = new URLSearchParams()\n expect(subject.ensureStringBody(usp)).toBe(usp)\n const blob = new Blob()\n expect(subject.ensureStringBody(blob)).toBe(blob)\n })\n})\ndescribe('makeGetApiURL', () => {",
"score": 0.8296101689338684
},
{
"filename": "src/primitives.test.ts",
"retrieved_chunk": " })\n})\ndescribe('ensureStringBody', () => {\n it('should return the same if body was string', () => {\n expect(subject.ensureStringBody('foo')).toBe('foo')\n })\n it('should return the same if body was not defined', () => {\n expect(subject.ensureStringBody()).toBeUndefined()\n })\n it('should stringify the body if it is a JSON-like value', () => {",
"score": 0.8150316476821899
},
{
"filename": "src/primitives.test.ts",
"retrieved_chunk": " ).toBe('https://example.com/api/users')\n expect(\n subject.makeGetApiURL(new URL('https://example.com/api/'))('///users'),\n ).toBe('https://example.com/api/users')\n })\n it('should add missing slashes', () => {\n expect(\n subject.makeGetApiURL(new URL('https://example.com/api'))('users'),\n ).toBe('https://example.com/api/users')\n })",
"score": 0.8133678436279297
},
{
"filename": "src/primitives.test.ts",
"retrieved_chunk": " expect(getApiURL('/users', { active: 'true', page: '2' })).toBe(\n 'https://example.com/api/users?active=true&page=2',\n )\n })\n it('should accept a URL as baseURL and remove extra slashes', () => {\n expect(\n subject.makeGetApiURL(new URL('https://example.com/api'))('/users'),\n ).toBe('https://example.com/api/users')\n expect(\n subject.makeGetApiURL(new URL('https://example.com/api/'))('/users'),",
"score": 0.8106040954589844
}
] |
typescript
|
for (const method of HTTP_METHODS) {
|
import config from '../config'
import { ImportData, ImportGroups, LibraryRule } from '../types'
const getImportDepth = (path: string) => {
return path.split('/').length
}
const asc = (a: ImportData, b: ImportData) => {
const depthA = getImportDepth(a.path)
const depthB = getImportDepth(b.path)
if (depthA !== depthB) {
return depthA - depthB
} else {
return a.path.localeCompare(b.path)
}
}
const desc = (a: ImportData, b: ImportData) => {
const depthA = getImportDepth(a.path)
const depthB = getImportDepth(b.path)
if (depthA !== depthB) {
return depthB - depthA
} else {
return a.path.localeCompare(b.path)
}
}
const sortLibraries = (imports: ImportData[]) => {
let result: ImportData[] = []
const groups: Record<string, ImportData[]> = {}
for (const library of config.libs) {
groups[library.name] = []
for (let i = 0; i < imports.length; i++) {
const importData = imports[i]
if (
(library.rule === LibraryRule.EXACT && importData.path === library.name) ||
(library.rule === LibraryRule.STARTS && importData.path.startsWith(library.name)) ||
(library.rule === LibraryRule.INCLUDES && importData.path.includes(library.name))
) {
groups[library.name].push(importData)
imports.splice(i, 1)
i--
}
}
}
for (const groupKey in groups) {
groups[groupKey].sort(asc)
result = [...result, ...groups[groupKey]]
}
imports.sort(asc)
result = [...result, ...imports]
return destructuringSort(result)
}
const sortAliases = (imports: ImportData[]) => {
const sortedImports = imports.sort(asc)
return destructuringSort(sortedImports)
}
const sortRelatives = (imports: ImportData[]) => {
const outFolderImports: ImportData[] = []
const currentFolderImports: ImportData[] = []
for (const importData of imports) {
if (importData.path.startsWith('./')) {
currentFolderImports.push(importData)
} else {
outFolderImports.push(importData)
}
}
outFolderImports.sort(desc)
currentFolderImports.sort(desc)
return destructuringSort(outFolderImports.concat(currentFolderImports))
}
const destructuringSort = (imports: ImportData[]) => {
const result: ImportData[] = []
for (const importData of imports) {
const searchResult
|
= importData.raw.match(/\{[\s\S]+?}/gm)
if (searchResult) {
|
const importElementsString = searchResult[0].replace(/[{}\s]/gm, '')
const importElements = importElementsString
.split(',')
.filter((importElement) => importElement)
importElements.sort(function (a, b) {
if (a.length === b.length) {
return a.localeCompare(b)
} else {
return a.length - b.length
}
})
result.push({
raw: importData.raw.replace(/\{[\s\S]+?}/gm, `{ ${importElements.join(',')} }`),
path: importData.path,
})
} else {
result.push(importData)
}
}
return result
}
export const sortImportGroups = (inputGroups: ImportGroups) => {
return {
libraries: sortLibraries(inputGroups.libraries),
aliases: sortAliases(inputGroups.aliases),
relatives: sortRelatives(inputGroups.relatives),
directRelatives: sortRelatives(inputGroups.directRelatives),
}
}
|
src/utils/sort-import-groups.ts
|
crmapache-prettier-plugin-sort-react-imports-a237c21
|
[
{
"filename": "src/utils/prepare-code.ts",
"retrieved_chunk": " result += '\\n'\n for (const importData of importGroups.relatives) {\n result += `${importData.raw}\\n`\n }\n if (importGroups.directRelatives.length > 0) {\n result += '\\n'\n for (const importData of importGroups.directRelatives) {\n result += `${importData.raw}\\n`\n }\n }",
"score": 0.8173624277114868
},
{
"filename": "src/utils/split-imports-to-groups.ts",
"retrieved_chunk": "export const splitImportsIntoGroups = (imports: Import[]): ImportGroups => {\n const libraries: ImportData[] = []\n const aliases: ImportData[] = []\n const relatives: ImportData[] = []\n const directRelatives: ImportData[] = []\n const userAliases = config.aliases\n for (const importString of imports) {\n const importSource = extractImportPath(importString)\n if (\n ((userAliases.length < 1 && importSource.startsWith('@')) ||",
"score": 0.8128265738487244
},
{
"filename": "src/utils/index.ts",
"retrieved_chunk": "export * from './split-imports-to-groups'\nexport * from './sort-import-groups'\nexport * from './prepare-code'",
"score": 0.8009445667266846
},
{
"filename": "src/utils/split-imports-to-groups.ts",
"retrieved_chunk": " matchToUserAlias(importSource, userAliases)) &&\n !isDireactAliasImport(importSource, importString)\n ) {\n aliases.push({ raw: importString, path: importSource })\n } else if (importSource.startsWith('.') && importString.includes('from')) {\n relatives.push({ raw: importString, path: importSource })\n } else if (importSource.startsWith('.') || isDireactAliasImport(importSource, importString)) {\n directRelatives.push({ raw: importString, path: importSource })\n } else {\n libraries.push({ raw: importString, path: importSource })",
"score": 0.7992058396339417
},
{
"filename": "src/utils/prepare-code.ts",
"retrieved_chunk": "import { ImportGroups } from '../types'\nexport const prepareCode = (importGroups: ImportGroups) => {\n let result = ''\n for (const importData of importGroups.libraries) {\n result += `${importData.raw}\\n`\n }\n result += '\\n'\n for (const importData of importGroups.aliases) {\n result += `${importData.raw}\\n`\n }",
"score": 0.79593825340271
}
] |
typescript
|
= importData.raw.match(/\{[\s\S]+?}/gm)
if (searchResult) {
|
import type { ReadonlyDeep } from 'type-fest';
import vscode from 'vscode';
import fs from 'fs-extra';
import { log } from './log';
import { TYPE_KIND } from './helpers/type-analyzer/constants';
interface ExtensionConfig {
/** @default true */
enabled: boolean;
/** @default `{$ExtensionRootPath}/res/type-icon.png` */
typeIconPath: string;
/** @default [] */
ignoreTypeKinds: TYPE_KIND[];
}
const defaultTypeIconPath = `${__dirname}/../res/type-icon.png`;
export class Config {
private static _instance: Config;
/** instance */
static get i(): Config {
return (Config._instance ??= new Config());
}
get(): ReadonlyDeep<ExtensionConfig> {
return Object.freeze(this.config);
}
private sync() {
const config = vscode.workspace.getConfiguration('ts-type-hidden');
this.config = {
enabled: config.get('enabled', true),
typeIconPath: config.get('typeIconPath') || defaultTypeIconPath,
ignoreTypeKinds: config.get('ignoreTypeKinds', [])
} satisfies ExtensionConfig;
}
private config!: ExtensionConfig;
private watchCallbacks: Array<Function> = [];
private constructor() {
this.sync();
this.verify();
this.watch();
}
update() {
this.sync();
|
log.appendLine(`Config updated:
${JSON.stringify(this.config, null, 2)}
|
`);
}
registerWatchCallback(fn: Function) {
this.watchCallbacks.push(fn);
}
private verify() {
if (!fs.existsSync(this.config.typeIconPath)) {
vscode.window.showErrorMessage(
'[ts-type-hidden configuration]: \n`typeIconPath` is not a valid path'
);
this.config.typeIconPath = defaultTypeIconPath;
}
for (let i = this.config.ignoreTypeKinds.length - 1; i >= 0; i--) {
const typeKindToIgnore = this.config.ignoreTypeKinds[i];
const isInvalid = !Object.values(TYPE_KIND).includes(typeKindToIgnore);
if (isInvalid) {
this.config.ignoreTypeKinds.splice(i, 1);
vscode.window.showErrorMessage(
`[ts-type-hidden configuration]: \n\`ignoreTypeKinds.${typeKindToIgnore}\` is not a valid value`
);
}
}
}
private watch() {
vscode.workspace.onDidChangeConfiguration(() => {
this.update();
this.verify();
this.watchCallbacks.forEach(cb => cb());
});
}
}
|
src/core/config.ts
|
xlboy-ts-type-hidden-a749a29
|
[
{
"filename": "src/index.ts",
"retrieved_chunk": " if (!Config.i.get().enabled) {\n log.appendLine('Extension disabled, exiting...');\n return;\n }\n GlobalState.init(vscodeContext);\n EditorContext.init();\n StatusBar.init(vscodeContext);\n registerCommand(vscodeContext);\n}",
"score": 0.7774835824966431
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": " Config.i.registerWatchCallback(this.decoration.refreshIcon);\n if (GlobalState.i.isHiddenMode) this.hideType(true);\n }\n hideType(needToFold = false) {\n const activeEditorWindow = vscode.window.activeTextEditor;\n if (activeEditorWindow && this.utils.isTargetDocument(activeEditorWindow.document)) {\n const activeEditorInfo = this.editors.get(activeEditorWindow.document.fileName);\n if (!activeEditorInfo) return;\n const filteredAnalyzedTypes = activeEditorInfo.analyzedTypes\n .filter(type => !Config.i.get().ignoreTypeKinds.includes(type.kind))",
"score": 0.7575225830078125
},
{
"filename": "src/index.ts",
"retrieved_chunk": "import vscode from 'vscode';\nimport { version } from '../package.json';\nimport { registerCommand } from './core/command';\nimport { Config } from './core/config';\nimport { EditorContext } from './core/editor-context';\nimport { log } from './core/log';\nimport { StatusBar } from './core/status-bar';\nimport { GlobalState } from './core/global-state';\nexport function activate(vscodeContext: vscode.ExtensionContext) {\n log.appendLine(`TS Type Hidden for VS Code v${version}\\n`);",
"score": 0.7563979625701904
},
{
"filename": "src/core/command.ts",
"retrieved_chunk": " })\n );\n}",
"score": 0.7508869171142578
},
{
"filename": "src/core/status-bar.ts",
"retrieved_chunk": " vscodeContext.subscriptions.push(this.statusBarItem);\n }\n changeStatus(isHiddenMode: boolean) {\n this.statusBarItem.text = isHiddenMode ? 'TH ✅' : 'TH ❌';\n this.statusBarItem.tooltip =\n '[TS Type Hidden] - Click to toggle hidden mode (Current mode: ' +\n (isHiddenMode ? 'On' : 'Off') +\n ')';\n }\n}",
"score": 0.7484002113342285
}
] |
typescript
|
log.appendLine(`Config updated:
${JSON.stringify(this.config, null, 2)}
|
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns A Promise resolving to an array of `Document<Record<string, unknown>>` objects, where
* each `Document` holds one split portion of the input file.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
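// Hedged usage sketch (not part of the original file): the loader dispatch
// above can be exercised directly; the file names here are hypothetical
// placeholders, not files shipped with the project.
//
// const perFile = await Promise.all(
//   ['notes.md', 'report.pdf'].map((name) =>
//     loadAndSplitFile(path.join(projectRootDir, 'docs', name))
//   )
// );
// const docs = perFile.flat();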
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
|
await createDirectory(getConfig().currentVectorStoreDatabasePath);
|
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved; add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
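// Illustrative call (assumption: these files exist under DOCS_DIR): entries
// are resolved against the docs directory inside addDocument, so relative
// names are enough.
//
// await addDocument(['getting-started.md', 'api-reference.pdf']);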
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
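// Illustrative call, mirroring the `/add-url` example documented elsewhere in
// the repo (selector defaults to 'body', 20 pages, 200-character minimum):
// crawl up to 10 pages under `main`, skipping pages under 500 characters.
//
// await addURL('https://dociq.io', 'main', 10, 500);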
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
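// Consumer-side sketch (import path assumed from this file's location,
// illustrative only):
//
// import { addDocument, listContextStores } from './lib/contextManager.js';
// await addDocument(['readme.md']);
// await listContextStores();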
|
src/lib/contextManager.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.8760066628456116
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport { HNSWLib } from 'langchain/vectorstores/hnswlib';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { stdout as output } from 'node:process';\nimport { OpenAIEmbeddings } from 'langchain/embeddings/openai';\nimport { Document } from 'langchain/document';\nimport { BufferWindowMemory } from 'langchain/memory';\nimport { getProjectRoot } from '../config/index.js';\nconst projectRootDir = getProjectRoot();",
"score": 0.830948531627655
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.8231281042098999
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " onReset(newMemoryVectorStore);\n}\nfunction setMemoryVectorStore(newMemoryVectorStore: HNSWLib) {\n memoryWrapper.vectorStoreInstance = newMemoryVectorStore;\n}\nexport {\n getMemoryVectorStore,\n setMemoryVectorStore,\n addDocumentsToMemoryVectorStore,\n resetMemoryVectorStore,",
"score": 0.8030155897140503
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "async function getMemoryVectorStore() {\n return memoryWrapper.vectorStoreInstance;\n}\nfunction getBufferWindowMemory() {\n return bufferWindowMemory;\n}\nasync function saveMemoryVectorStore() {\n await memoryWrapper.vectorStoreInstance.save(memoryDirectory);\n}\nasync function addDocumentsToMemoryVectorStore(",
"score": 0.789269745349884
}
] |
typescript
|
await createDirectory(getConfig().currentVectorStoreDatabasePath);
|
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
|
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
|
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
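// A hedged, standalone sketch of the two-step API the suites above exercise
// (construct, analyze, then read `analyzedTypes`); the input string is
// illustrative, not an original fixture:
//
// const analyzer = new TypeAnalyzer('const x = 1 as number;');
// analyzer.analyze();
// analyzer.analyzedTypes;
// // => [{ range: { pos: 11, end: 21 }, text: ' as number',
// //       kind: TYPE_KIND.AS_ASSERTION }]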
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
|
src/core/helpers/type-analyzer/index.test.ts
|
xlboy-ts-type-hidden-a749a29
|
[
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 0.8254477381706238
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 0.823991060256958
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 0.8232643604278564
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " const prevNode = children[startIndex - 1];\n // >\n const nextNode = children[endIndex + 1];\n return this.pushAnalyzedType(TYPE_KIND.TSX_COMPONENT_GENERIC, [\n prevNode.end - 1,\n nextNode.pos\n ]);\n }\n }\n // [class] context: `class A { a?: number }`, get `?: number`",
"score": 0.817371129989624
},
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": " ANGLE_BRACKETS_ASSERTION = 'angle-brackets-assertion',\n /**\n * ```ts\n * fn() as any;\n * ```\n * ⏭️ ` as any`\n */\n AS_ASSERTION = 'as-assertion',\n /**\n * ```ts",
"score": 0.8172931671142578
}
] |
typescript
|
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
|
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
|
const filesToAdd = await getDirectoryFiles(docsDirectory);
|
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved; add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
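// Hedged usage sketch (illustrative): per the JSDoc above, either a full URL
// or a bare video ID is accepted. The ID below is a placeholder.
//
// await addYouTube('https://www.youtube.com/watch?v=VIDEO_ID');
// await addYouTube('VIDEO_ID');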
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
|
src/lib/contextManager.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.922629714012146
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport { HNSWLib } from 'langchain/vectorstores/hnswlib';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { stdout as output } from 'node:process';\nimport { OpenAIEmbeddings } from 'langchain/embeddings/openai';\nimport { Document } from 'langchain/document';\nimport { BufferWindowMemory } from 'langchain/memory';\nimport { getProjectRoot } from '../config/index.js';\nconst projectRootDir = getProjectRoot();",
"score": 0.8857007622718811
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.8658145070075989
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 0.847303569316864
},
{
"filename": "src/index.ts",
"retrieved_chunk": "import { getRelevantContext } from './lib/vectorStoreUtils.js';\nimport sanitizeInput from './utils/sanitizeInput.js';\nimport { getConfig, getProjectRoot } from './config/index.js';\nconst projectRootDir = getProjectRoot();\ndotenv.config();\n// Set up the chat log directory\nconst chatLogDirectory = path.join(projectRootDir, 'chat_logs');\n// Get the prompt template\nconst systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');\n// Set up the readline interface to read input from the user and write output to the console",
"score": 0.8464484810829163
}
] |
typescript
|
const filesToAdd = await getDirectoryFiles(docsDirectory);
|
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved; add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
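// Hedged sketch (illustrative): switching to a fresh named store; the
// sub-directory name is a placeholder. Per the code above, the store lives
// under `${VECTOR_STORE_BASE_DIR || 'db'}/<subDirectory>`.
//
// const store = await loadOrCreateEmptyVectorStore('my-project');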
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler =
|
new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
|
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
|
src/lib/contextManager.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": "import * as cheerio from 'cheerio';\nimport Crawler, { CrawlerRequestResponse } from 'crawler';\nimport { stderr } from 'node:process';\nimport resolveURL from '../utils/resolveURL.js';\n// import TurndownService from 'turndown';\n// const turndownService = new TurndownService();\ntype ProgressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => void;\ninterface Page {\n url: string;\n text: string;",
"score": 0.8597506284713745
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " title: string;\n}\n/* The WebCrawler class is a TypeScript implementation of a web crawler that can extract text from web\npages and follow links to crawl more pages. */\nclass WebCrawler {\n pages: Page[];\n limit: number;\n urls: string[];\n count: number;\n textLengthMinimum: number;",
"score": 0.8536611199378967
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " selector: string;\n progressCallback: ProgressCallback;\n crawler: Crawler;\n constructor(\n urls: string[],\n progressCallback: ProgressCallback,\n selector = 'body',\n limit = 20,\n textLengthMinimum = 200\n ) {",
"score": 0.8530166149139404
},
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { addURL } from '../lib/contextManager.js';\nconst addURLCommand = createCommand(\n 'add-url',\n ['url'],\n `Scrapes the content from a url and adds it to the context vector store.\\n\n Arguments: \\`url\\`, \\`selector to extract\\` (Default: body), \\`Maximum number of links to follow\\` (Default: 20), \\`Ignore pages with less than n characters\\` (Default: 200)\\n\n Example: /add-url https://dociq.io main 10 500\\n\n This operation may try to generate a large number of embeddings depending on the structure of the web pages and may lead to rate-limiting.\\n",
"score": 0.8475353717803955
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " this.urls = urls;\n this.selector = selector;\n this.limit = limit;\n this.textLengthMinimum = textLengthMinimum;\n this.progressCallback = progressCallback;\n this.count = 0;\n this.pages = [];\n this.crawler = new Crawler({\n maxConnections: 10,\n callback: this.handleRequest,",
"score": 0.8438006639480591
}
] |
typescript
|
new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
|
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
|
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
|
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
  const projectRoot = getProjectRoot();
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
|
src/lib/contextManager.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.9075193405151367
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport { HNSWLib } from 'langchain/vectorstores/hnswlib';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { stdout as output } from 'node:process';\nimport { OpenAIEmbeddings } from 'langchain/embeddings/openai';\nimport { Document } from 'langchain/document';\nimport { BufferWindowMemory } from 'langchain/memory';\nimport { getProjectRoot } from '../config/index.js';\nconst projectRootDir = getProjectRoot();",
"score": 0.8738224506378174
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.8561716079711914
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 0.827295184135437
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "async function getMemoryVectorStore() {\n return memoryWrapper.vectorStoreInstance;\n}\nfunction getBufferWindowMemory() {\n return bufferWindowMemory;\n}\nasync function saveMemoryVectorStore() {\n await memoryWrapper.vectorStoreInstance.save(memoryDirectory);\n}\nasync function addDocumentsToMemoryVectorStore(",
"score": 0.8246986269950867
}
] |
typescript
|
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
|
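The completion above fans each file path out into a Promise and awaits them together; a minimal sketch using the loadAndSplitFile function defined in this row (the file names are illustrative):
const filesToAdd = ['notes.md', 'report.pdf'];
// One Document[] per file; the result is Document[][] and still needs flattening.
const perFileDocs = await Promise.all(filesToAdd.map((p) => loadAndSplitFile(p)));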
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(
|
getConfig().currentVectorStoreDatabasePath);
|
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
  const projectRoot = getProjectRoot();
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
|
src/lib/contextManager.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.8678246140480042
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport { HNSWLib } from 'langchain/vectorstores/hnswlib';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { stdout as output } from 'node:process';\nimport { OpenAIEmbeddings } from 'langchain/embeddings/openai';\nimport { Document } from 'langchain/document';\nimport { BufferWindowMemory } from 'langchain/memory';\nimport { getProjectRoot } from '../config/index.js';\nconst projectRootDir = getProjectRoot();",
"score": 0.8209010362625122
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.8170706033706665
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " onReset(newMemoryVectorStore);\n}\nfunction setMemoryVectorStore(newMemoryVectorStore: HNSWLib) {\n memoryWrapper.vectorStoreInstance = newMemoryVectorStore;\n}\nexport {\n getMemoryVectorStore,\n setMemoryVectorStore,\n addDocumentsToMemoryVectorStore,\n resetMemoryVectorStore,",
"score": 0.7905101776123047
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "async function getMemoryVectorStore() {\n return memoryWrapper.vectorStoreInstance;\n}\nfunction getBufferWindowMemory() {\n return bufferWindowMemory;\n}\nasync function saveMemoryVectorStore() {\n await memoryWrapper.vectorStoreInstance.save(memoryDirectory);\n}\nasync function addDocumentsToMemoryVectorStore(",
"score": 0.7827966213226318
}
] |
typescript
|
getConfig().currentVectorStoreDatabasePath);
|
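The completed call passes the configured store path to createDirectory before HNSWLib.load is attempted. The utility's body is not shown in this row; a plausible sketch (an assumption, not the repo's actual implementation) is:
import fs from 'fs/promises';

async function createDirectory(dir: string): Promise<void> {
  await fs.mkdir(dir, { recursive: true }); // no-op if the directory already exists
}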
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
|
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
|
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
  const projectRoot = getProjectRoot();
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
|
src/lib/contextManager.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.9103102087974548
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport { HNSWLib } from 'langchain/vectorstores/hnswlib';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { stdout as output } from 'node:process';\nimport { OpenAIEmbeddings } from 'langchain/embeddings/openai';\nimport { Document } from 'langchain/document';\nimport { BufferWindowMemory } from 'langchain/memory';\nimport { getProjectRoot } from '../config/index.js';\nconst projectRootDir = getProjectRoot();",
"score": 0.8763018846511841
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 0.8644837141036987
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "async function getMemoryVectorStore() {\n return memoryWrapper.vectorStoreInstance;\n}\nfunction getBufferWindowMemory() {\n return bufferWindowMemory;\n}\nasync function saveMemoryVectorStore() {\n await memoryWrapper.vectorStoreInstance.save(memoryDirectory);\n}\nasync function addDocumentsToMemoryVectorStore(",
"score": 0.827202558517456
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 0.8243941068649292
}
] |
typescript
|
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
|
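The completion flattens the per-file Document[][] into a single Document[]. A minimal sketch of the two equivalent forms this codebase uses (addURL does the same job with .flat()):
const nested: number[][] = [[1, 2], [3]];
const viaReduce = nested.reduce((acc: number[], val) => acc.concat(val), []); // [1, 2, 3]
const viaFlat = nested.flat();                                                // [1, 2, 3]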
import * as cheerio from 'cheerio';
import Crawler, { CrawlerRequestResponse } from 'crawler';
import { stderr } from 'node:process';
import resolveURL from '../utils/resolveURL.js';
// import TurndownService from 'turndown';
// const turndownService = new TurndownService();
type ProgressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => void;
interface Page {
url: string;
text: string;
title: string;
}
/* The WebCrawler class is a TypeScript implementation of a web crawler that can extract text from web
pages and follow links to crawl more pages. */
class WebCrawler {
pages: Page[];
limit: number;
urls: string[];
count: number;
textLengthMinimum: number;
selector: string;
progressCallback: ProgressCallback;
crawler: Crawler;
constructor(
urls: string[],
progressCallback: ProgressCallback,
selector = 'body',
limit = 20,
textLengthMinimum = 200
) {
this.urls = urls;
this.selector = selector;
this.limit = limit;
this.textLengthMinimum = textLengthMinimum;
this.progressCallback = progressCallback;
this.count = 0;
this.pages = [];
this.crawler = new Crawler({
maxConnections: 10,
callback: this.handleRequest,
userAgent: 'node-crawler',
});
}
/* `handleRequest` is a method that handles the response of a web page request made by the `crawler`
object. It takes in three parameters: `error`, `res`, and `done`. */
handleRequest = (error: Error | null, res: CrawlerRequestResponse, done: () => void) => {
if (error) {
stderr.write(error.message);
done();
return;
}
const $ = cheerio.load(res.body);
// Remove obviously superfluous elements
$('script').remove();
$('header').remove();
$('nav').remove();
$('style').remove();
$('img').remove();
$('svg').remove();
const title = $('title').text() || '';
const text = $(this.selector).text();
// const text = turndownService.turndown(html || '');
const page: Page = {
url: res.request.uri.href,
text,
title,
};
if (text.length > this.textLengthMinimum) {
this.pages.push(page);
this.progressCallback(this.count + 1, this.pages.length, res.request.uri.href);
}
$('a').each((_i: number, elem: cheerio.Element) => {
if (this.count >= this.limit) {
return false; // Stop iterating once the limit is reached
}
const href = $(elem).attr('href')?.split('#')[0];
const uri = res.request.uri.href;
const
|
url = href && resolveURL(uri, href);
|
// crawl more
if (url && this.urls.some((u) => url.includes(u))) {
this.crawler.queue(url);
this.count += 1;
}
return true; // Continue iterating when the limit is not reached
});
done();
};
start = async () => {
this.pages = [];
return new Promise((resolve) => {
this.crawler.on('drain', () => {
resolve(this.pages);
});
this.urls.forEach((url) => {
this.crawler.queue(url);
});
});
};
}
export default WebCrawler;
|
src/lib/crawler.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": "async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {\n const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });\n let documents;\n try {\n addUrlSpinner.start();\n const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {\n addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;\n };\n const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);",
"score": 0.7949161529541016
},
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": " To avoid this, you can try to target a specific selector such as \\`.main\\``,\n async (args, output) => {\n if (!args || args.length > 4) {\n output.write(\n chalk.red(\n 'Invalid number of arguments. Usage: /add-url `url` `selector to extract` `Maximum number of links to follow` `Ignore pages with less than n characters`\\n'\n )\n );\n return;\n }",
"score": 0.7851392030715942
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " })\n );\n addUrlSpinner.succeed();\n } catch (error) {\n addUrlSpinner.fail(chalk.red(error));\n }\n if (documents) {\n const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });\n try {\n const flattenedDocuments = documents.flat();",
"score": 0.7729886770248413
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const pages = (await crawler.start()) as Page[];\n documents = await Promise.all(\n pages.map((row) => {\n const splitter = new RecursiveCharacterTextSplitter();\n const webDocs = splitter.splitDocuments([\n new Document({\n pageContent: row.text,\n }),\n ]);\n return webDocs;",
"score": 0.7684286832809448
},
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": " */\nexport default function resolve(from: string, to: string) {\n const resolvedUrl = new URL(to, new URL(from, 'resolve://'));\n if (resolvedUrl.protocol === 'resolve:') {\n // `from` is a relative URL.\n const { pathname, search, hash } = resolvedUrl;\n return pathname + search + hash;\n }\n return resolvedUrl.toString();\n}",
"score": 0.7639470100402832
}
] |
typescript
|
url = href && resolveURL(uri, href);
|
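For reference, the resolveURL utility the completion calls is the one shown in the retrieved src/utils/resolveURL.ts chunk; it resolves `to` against `from`, and the 'resolve://' base lets it cope with a relative `from` as well:
function resolveURL(from: string, to: string): string {
  const resolvedUrl = new URL(to, new URL(from, 'resolve://'));
  if (resolvedUrl.protocol === 'resolve:') {
    // `from` was itself relative; return the resolved relative form.
    const { pathname, search, hash } = resolvedUrl;
    return pathname + search + hash;
  }
  return resolvedUrl.toString();
}
// e.g. resolveURL('https://example.com/docs/', '../about') === 'https://example.com/about'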
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
|
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
|
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
|
src/index.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " let vectorStore: HNSWLib;\n let spinner;\n const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);\n await createDirectory(newContextVectorStorePath);\n setCurrentVectorStoreDatabasePath(newContextVectorStorePath);\n const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\\n`));\n } catch {",
"score": 0.8308044075965881
},
{
"filename": "src/commands/setContextConfigCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { setNumContextDocumentsToRetrieve, getConfig } from '../config/index.js';\nconst setContextConfigCommand = createCommand(\n 'context-config',\n ['cc'],\n `Sets the number of relevant documents to return from the context vector store.\\n\n Arguments: \\`number of documents\\` (Default: 6)\\n\n Example: \\`/context-config 10\\``,\n async (args, output) => {",
"score": 0.8272796869277954
},
{
"filename": "src/commands/setMemoryConfigCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { setNumMemoryDocumentsToRetrieve, getConfig } from '../config/index.js';\nconst setMemoryConfigCommand = createCommand(\n 'memory-config',\n ['mc'],\n `Sets the number of relevant documents to return from the memory vector store.\\n\n Arguments: \\`number of documents\\` (Default: 4)\\n\n Example: /memory-config 10`,\n async (args, output) => {",
"score": 0.8261851668357849
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n } catch {\n spinner = ora({\n ...defaultOraOptions,\n text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),\n }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const filesToAdd = await getDirectoryFiles(docsDirectory);",
"score": 0.822828471660614
},
{
"filename": "src/commands/setContextConfigCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /context-config `number of documents`\\n'));\n return;\n }\n const numContextDocumentsToRetrieve = parseInt(args[0], 10);\n setNumContextDocumentsToRetrieve(numContextDocumentsToRetrieve);\n const config = getConfig();\n output.write(chalk.blue(`Number of context documents to retrieve set to ${config.numContextDocumentsToRetrieve}`));\n }\n);",
"score": 0.8214781284332275
}
] |
typescript
|
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
|
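getRelevantContext is imported from './lib/vectorStoreUtils.js' but its body is not shown in this row. A hedged sketch of what such a helper plausibly does with an HNSWLib store, using LangChain's standard similaritySearch API:
import { HNSWLib } from 'langchain/vectorstores/hnswlib';

async function getRelevantContextSketch(store: HNSWLib, query: string, k: number): Promise<string> {
  const docs = await store.similaritySearch(query, k); // top-k nearest documents
  return docs.map((d) => d.pageContent).join('\n');
}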
import { COMMENT_PRE, commentPreReg } from '../constants'
import { parsePlatform } from './parsePlatform'
export function parseComment(code: string) {
if (code.trim().length === 0)
return
const commentResults = [...code.matchAll(commentPreReg)]
if (commentResults.length === 0)
return
const commentAST = []
for (let i = 0; i < commentResults.length; i++) {
const item = commentResults[i]
const index = item.index!
const [self, commentPre, _space, prefix, _platform] = item
if (!COMMENT_PRE.includes(commentPre))
continue
const platform = _platform.trim()
if (platform && prefix !== '#endif') {
const prefixStart = self.indexOf(prefix) + index
const prefixEnd = prefixStart + prefix.length
commentAST.push({
start: prefixStart,
end: prefixEnd,
type: 'prefix',
row: prefix,
})
const platforms = parsePlatform(platform, commentPre)
if (!platforms)
continue
if (platforms.length > 1) {
const orRex = /\|\|/g
const orResult = [...platform.matchAll(orRex)]
const offset = index + self.indexOf(_platform) + 1
orResult.forEach((element) => {
const orStart = offset + element.index!
const orEnd = orStart + 2
commentAST.push({
start: orStart,
end: orEnd,
type: 'prefix',
row: element[0],
})
})
}
platforms.forEach(
|
(element) => {
|
const platformStart = self.indexOf(element) + index
const platformEnd = platformStart + element.length
commentAST.push({
start: platformStart,
end: platformEnd,
type: 'platform',
row: element,
})
})
}
else {
const start = self.indexOf(prefix) + index
const end = start + prefix.length
commentAST.push({
start,
end,
row: prefix,
type: 'prefix',
})
}
}
return commentAST
}
|
src/parseComment/index.ts
|
uni-helper-uni-highlight-vscode-f9002ae
|
[
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": " const { start, end, type, row } = item\n const color = HIGHTLIGHT_COLOR.platform[row as Platform]\n if (type === 'prefix') {\n platformInfos.push({\n start,\n end,\n type,\n })\n }\n else if (type === 'platform' && color) {",
"score": 0.8405432105064392
},
{
"filename": "src/transformPlatform.ts",
"retrieved_chunk": " const { start, end, row, color } = platformInfo\n const range = new Range(\n editor.document.positionAt(start),\n editor.document.positionAt(end),\n )\n if (platformInfo.type === 'prefix')\n highlightRange.prefix.push(range)\n if (platformInfo.type === 'platform') {\n if (!highlightRange.platform[color])\n highlightRange.platform[color] = []",
"score": 0.8340054750442505
},
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": " platformInfos.push({\n start,\n end,\n type,\n color,\n })\n }\n else if (type === 'platform' && !color) {\n platformInfos.push({\n start,",
"score": 0.8296719789505005
},
{
"filename": "src/parseComment/parsePlatform.ts",
"retrieved_chunk": "import { commentSufReg } from '../constants'\nexport function parsePlatform(platform: string, commentPre: string): string[] {\n let platforms: string[]\n if (commentPre !== '//') {\n const PlatformResult = [...platform.matchAll(commentSufReg)][0]\n if (!PlatformResult)\n return []\n const [_self, _platform, _commentSuf] = PlatformResult\n platform = _platform.trim()\n }",
"score": 0.8234587907791138
},
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": "import type { Platform } from './constants'\nimport { HIGHTLIGHT_COLOR } from './constants'\nimport { parseComment } from './parseComment'\nexport function getPlatformInfo(code: string): PlatformInfo[] {\n const commentAST = parseComment(code)\n if (!commentAST)\n return []\n const platformInfos = []\n for (let i = 0; i < commentAST.length; i++) {\n const item = commentAST[i]",
"score": 0.8161441087722778
}
] |
typescript
|
(element) => {
|
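A usage sketch for the parseComment function above, assuming COMMENT_PRE and commentPreReg (imported from ../constants, not shown) match uni-app style `// #ifdef PLATFORM` conditional-compilation comments:

import { parseComment } from './parseComment'

// Platforms can be combined with ||, which parseComment records as extra
// 'prefix' nodes alongside the 'platform' nodes for each platform name.
const sample = `
// #ifdef H5 || MP-WEIXIN
console.log('only on H5 or the WeChat mini program')
// #endif
`

// Each node carries start/end offsets plus a type of 'prefix' or 'platform';
// getPlatformInfo and transformPlatform later turn these into editor ranges.
console.log(parseComment(sample))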
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
|
await logChat(chatLogDirectory, question, response.text);
|
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
|
src/index.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": "const getLogFilename = (): string => {\n const currentDate = new Date();\n const year = currentDate.getFullYear();\n const month = String(currentDate.getMonth() + 1).padStart(2, '0');\n const day = String(currentDate.getDate()).padStart(2, '0');\n return `${year}-${month}-${day}.json`;\n};\nconst logChat = async (logDirectory: string, question: string, answer: string): Promise<void> => {\n const timestamp = new Date().toISOString();\n const chatHistory: ChatHistory = { timestamp, question, answer };",
"score": 0.8570204973220825
},
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": " const logFilename = getLogFilename();\n const logFilePath = path.join(logDirectory, logFilename);\n ensureLogDirectory(logDirectory);\n if (!fs.existsSync(logFilePath)) {\n await fs.writeJson(logFilePath, [chatHistory]);\n } else {\n const chatHistoryArray = await fs.readJson(logFilePath);\n chatHistoryArray.push(chatHistory);\n await fs.writeJson(logFilePath, chatHistoryArray);\n }",
"score": 0.8440454602241516
},
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": "import fs from 'fs-extra';\nimport path from 'path';\ninterface ChatHistory {\n timestamp: string;\n question: string;\n answer: string;\n}\nconst ensureLogDirectory = (logDirectory: string): void => {\n fs.ensureDirSync(logDirectory);\n};",
"score": 0.8250676393508911
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nconst bufferWindowMemory = new BufferWindowMemory({\n returnMessages: false,\n memoryKey: 'immediate_history',\n inputKey: 'input',\n k: 2,\n});\nconst memoryWrapper = {\n vectorStoreInstance: memoryVectorStore,\n};",
"score": 0.8149712681770325
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " pageContent: text,\n }),\n ]);\n const vectorStore = await getContextVectorStore();\n await vectorStore.addDocuments(videoDocs);\n await vectorStore.save(dbDirectory);\n spinner.succeed();\n return;\n } catch (error) {\n if (spinner) {",
"score": 0.8141592741012573
}
] |
typescript
|
await logChat(chatLogDirectory, question, response.text);
|
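The logging call above appends the question/answer pair to the daily chat log. A standalone usage sketch, assuming the logChat signature shown in the retrieved src/chatLogger.ts chunks (logDirectory, question, answer), which appends { timestamp, question, answer } entries to a YYYY-MM-DD.json file:

import logChat from './chatLogger.js';

// Hypothetical standalone call: the logger ensures the directory exists and
// appends the entry to chat_logs/<current date>.json.
await logChat('chat_logs', 'What does the context store hold?', 'Embeddings of your documents.');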
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
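// Example: loadAndSplitFile('guide.md') pairs TextLoader with MarkdownTextSplitter,
// while loadAndSplitFile('spec.pdf') uses PDFLoader with page splitting disabled;
// any other extension falls through to the "Unsupported file extension" error above.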
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce(
|
(acc, val) => acc.concat(val), []);
|
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
`\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
|
src/lib/contextManager.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 0.8493415713310242
},
{
"filename": "src/commands/addDocumentCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { addDocument } from '../lib/contextManager.js';\nconst addDocumentCommand = createCommand(\n 'add-docs',\n ['docs'],\n `Adds new documents from your configured docs directory to the context vector store.\\n\n Usage: /add-docs example.txt example.md\\n\n Supports the following file types: .txt, .md, .pdf, .docx, .csv, .epub`,\n async (args: string[], output: NodeJS.WriteStream) => {",
"score": 0.8331298828125
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 0.8281310796737671
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport { HNSWLib } from 'langchain/vectorstores/hnswlib';\nimport fs from 'fs/promises';\nimport path from 'path';\nimport { stdout as output } from 'node:process';\nimport { OpenAIEmbeddings } from 'langchain/embeddings/openai';\nimport { Document } from 'langchain/document';\nimport { BufferWindowMemory } from 'langchain/memory';\nimport { getProjectRoot } from '../config/index.js';\nconst projectRootDir = getProjectRoot();",
"score": 0.8270758986473083
},
{
"filename": "src/commands/switchContextStoreCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /change-context-store `subdirectory`\\n'));\n return;\n }\n const subDirectory = args[0];\n await loadOrCreateEmptyVectorStore(subDirectory);\n }\n);\nexport default changeContextStoreCommand;",
"score": 0.8235146403312683
}
] |
typescript
|
(acc, val) => acc.concat(val), []);
|
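The middle span above flattens a Document[][] with reduce/concat; the same file uses Array.prototype.flat() for the crawler output in addURL. A self-contained sketch showing the two are equivalent for one level of nesting:

// Both expressions yield ['a', 'b', 'c'].
const nested: string[][] = [['a'], ['b', 'c']];
const viaReduce = nested.reduce<string[]>((acc, val) => acc.concat(val), []);
const viaFlat = nested.flat();
console.log(viaReduce, viaFlat);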
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
`\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
|
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
|
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
|
src/lib/contextManager.ts
|
gmickel-memorybot-bad0302
|
[
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": " const url = args[0];\n const selector = args[1];\n const maxLinks = parseInt(args[2], 10) || 20;\n const minChars = parseInt(args[3], 10) || 200;\n await addURL(url, selector, maxLinks, minChars);\n }\n);\nexport default addURLCommand;",
"score": 0.8695529103279114
},
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { addURL } from '../lib/contextManager.js';\nconst addURLCommand = createCommand(\n 'add-url',\n ['url'],\n `Scrapes the content from a url and adds it to the context vector store.\\n\n Arguments: \\`url\\`, \\`selector to extract\\` (Default: body), \\`Maximum number of links to follow\\` (Default: 20), \\`Ignore pages with less than n characters\\` (Default: 200)\\n\n Example: /add-url https://dociq.io main 10 500\\n\n This operation may try to generate a large number of embeddings depending on the structure of the web pages and may lead to rate-limiting.\\n",
"score": 0.8576883673667908
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " selector: string;\n progressCallback: ProgressCallback;\n crawler: Crawler;\n constructor(\n urls: string[],\n progressCallback: ProgressCallback,\n selector = 'body',\n limit = 20,\n textLengthMinimum = 200\n ) {",
"score": 0.8500675559043884
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": "import * as cheerio from 'cheerio';\nimport Crawler, { CrawlerRequestResponse } from 'crawler';\nimport { stderr } from 'node:process';\nimport resolveURL from '../utils/resolveURL.js';\n// import TurndownService from 'turndown';\n// const turndownService = new TurndownService();\ntype ProgressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => void;\ninterface Page {\n url: string;\n text: string;",
"score": 0.8499276638031006
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " title: string;\n}\n/* The WebCrawler class is a TypeScript implementation of a web crawler that can extract text from web\npages and follow links to crawl more pages. */\nclass WebCrawler {\n pages: Page[];\n limit: number;\n urls: string[];\n count: number;\n textLengthMinimum: number;",
"score": 0.8408596515655518
}
] |
typescript
|
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
|
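The line above wires a progress callback into the crawler. A hypothetical call sketch, following the constructor order shown in the retrieved src/lib/crawler.ts chunk (urls, progressCallback, selector, limit, textLengthMinimum) and assuming start() resolves to the crawled pages, as the cast in addURL suggests:

import WebCrawler from './crawler.js';

const onProgress = (found: number, crawled: number, url: string) => {
  console.log(`links found: ${found}, crawled: ${crawled}, at ${url}`);
};

// Crawl up to 10 pages under example.com, keeping only <main> text of 500+ characters.
const crawler = new WebCrawler(['https://example.com'], onProgress, 'main', 10, 500);
const pages = (await crawler.start()) as { url: string; text: string; title: string }[];
console.log(`crawled ${pages.length} pages`);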
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
let serverProcess: ChildProcess | undefined
let serverStdin: Writable | undefined
const genArgs = (executable: string, args: string[]) => {
return [
...args,
"-jar",
executable,
"nogui"
]
}
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
const cp = spawn(
"java",
genArgs(executable, args),
{
cwd: workDir
}
)
serverStdin = cp.stdin
serverProcess = cp
return cp
}
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
const cp = createServerProcess(workDir, executable, args)
cp.stdout.on("data", (data: Buffer) => {
const line = data.toString("utf8")
if (line.includes("Done") && line.includes("For help, type \"help\""))
serverStdin?.write("stop\n")
if (line.endsWith("\n"))
info(line.slice(0, -1))
else
info(line)
})
return new Promise<number>((resolve, reject) => {
cp.on("exit", (code) => {
if (code === 0)
resolve(code)
else
reject(code)
})
})
}
export const stopServer = () => {
if (!serverStdin || !serverProcess)
return
info("Stopping server...")
serverStdin.write("stop\n")
setTimeout(() => {
if (serverProcess!.killed)
return
warning("Server didn't stop in time, killing it...")
serverProcess?.kill("SIGKILL")
}, 1000 * 20)
}
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
info(`Starting tests of plugin ${pluginFile}.`)
if (isNoScenamatica())
await removeScenamatica(serverDir)
await deployPlugin(serverDir, pluginFile)
const cp = createServerProcess(serverDir, executable)
cp.stdout.on("data", async (data: Buffer) => {
await
|
onDataReceived(data.toString("utf8"))
})
}
|
const removeScenamatica = async (serverDir: string) => {
info("Removing Scenamatica from server...")
const pluginDir = path.join(serverDir, "plugins")
const files = await fs.promises.readdir(pluginDir)
for (const file of files) {
if (file.includes("Scenamatica") && file.endsWith(".jar")) {
info(`Removing ${file}...`)
await fs.promises.rm(path.join(pluginDir, file))
}
}
}
export const endTests = async (succeed: boolean) => {
info("Ending tests, shutting down server...")
kill()
stopServer()
await printFooter()
let code: number
if (succeed) {
info("Tests succeeded")
code = 0
} else {
setFailed("Tests failed")
code = 1
}
process.exit(code)
}
|
src/server/controller.ts
|
TeamKun-scenamatica-action-6f66283
|
[
{
"filename": "src/main.ts",
"retrieved_chunk": " initPRMode(pullRequest, githubToken)\n }\n if (!fs.existsSync(pluginFile)) {\n setFailed(`Plugin file ${pluginFile} does not exist`)\n return\n }\n const paper = await deployServer(serverDir, javaVersion, mcVersion, scenamaticaVersion)\n info(\"Starting tests...\")\n await startTests(serverDir, paper, pluginFile)\n}",
"score": 0.9190676212310791
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": "export const deployPlugin = async (serverDir: string, pluginFile: string) => {\n const pluginDir = path.join(serverDir, \"plugins\")\n await io.mkdirP(pluginDir)\n await io.cp(pluginFile, pluginDir)\n}\nconst initScenamaticaConfig = async (configDir: string, scenamaticaVersion: string) => {\n const configPath = path.join(configDir, \"config.yml\")\n const configData = yaml.load(await fs.promises.readFile(configPath, \"utf8\")) as {\n interfaces?: {\n raw: boolean",
"score": 0.8830567598342896
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": " return false\n }\n}\nexport const deployServer = async (\n dir: string,\n javaVersion: string,\n mcVersion: string,\n scenamaticaVersion: string,\n): Promise<string> => {\n const pluginDir = path.join(dir, \"plugins\")",
"score": 0.8304149508476257
},
{
"filename": "src/main.ts",
"retrieved_chunk": "import * as fs from \"node:fs\"\nimport { deployServer } from \"./server/deployer.js\"\nimport { startTests } from \"./server/controller.js\"\nimport type { Args } from \"./utils.js\"\nimport { getArguments } from \"./utils.js\"\nimport {info, setFailed} from \"@actions/core\";\nimport {context, getOctokit} from \"@actions/github\";\nimport type {PullRequestInfo} from \"./outputs/pull-request/appender\";\nimport {initPullRequest} from \"./server/client\";\nconst main = async (): Promise<void> => {",
"score": 0.8275740742683411
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": " // Paper のダウンロード\n await io.mkdirP(pluginDir)\n await downloadLatestPaper(dir, mcVersion)\n await downloadScenamatica(pluginDir, scenamaticaVersion)\n await writeEula(dir) // eula.txt を書き込まないと Paper が起動Vしない\n await startServerOnly(dir, PAPER_NAME)\n await initScenamaticaConfig(path.join(pluginDir, \"Scenamatica\"), scenamaticaVersion)\n await cache.saveCache([dir], genCacheKey(javaVersion, mcVersion, scenamaticaVersion))\n return PAPER_NAME\n}",
"score": 0.7940378189086914
}
] |
typescript
|
onDataReceived(data.toString("utf8"))
})
}
|
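One caveat to the stdout handling above: child-process 'data' events are not guaranteed to align with line boundaries, so a marker such as "Done" can straddle two chunks. A minimal, repo-independent sketch that buffers partial lines before handing complete ones to a callback:

import { spawn } from 'node:child_process';

function spawnWithLines(cmd: string, args: string[], onLine: (line: string) => void) {
  const cp = spawn(cmd, args);
  let pending = '';
  cp.stdout.on('data', (data: Buffer) => {
    pending += data.toString('utf8');
    const lines = pending.split('\n');
    pending = lines.pop() ?? ''; // keep the trailing fragment for the next chunk
    for (const line of lines) onLine(line);
  });
  return cp;
}

spawnWithLines('echo', ['hello'], (line) => console.log(line));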
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
let serverProcess: ChildProcess | undefined
let serverStdin: Writable | undefined
const genArgs = (executable: string, args: string[]) => {
return [
...args,
"-jar",
executable,
"nogui"
]
}
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
const cp = spawn(
"java",
genArgs(executable, args),
{
cwd: workDir
}
)
serverStdin = cp.stdin
serverProcess = cp
return cp
}
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
const cp = createServerProcess(workDir, executable, args)
cp.stdout.on("data", (data: Buffer) => {
const line = data.toString("utf8")
if (line.includes("Done") && line.includes("For help, type \"help\""))
serverStdin?.write("stop\n")
if (line.endsWith("\n"))
info(line.slice(0, -1))
else
info(line)
})
return new Promise<number>((resolve, reject) => {
cp.on("exit", (code) => {
if (code === 0)
resolve(code)
else
reject(code)
})
})
}
export const stopServer = () => {
if (!serverStdin || !serverProcess)
return
info("Stopping server...")
serverStdin.write("stop\n")
setTimeout(() => {
if (serverProcess!.killed)
return
warning("Server didn't stop in time, killing it...")
serverProcess?.kill("SIGKILL")
}, 1000 * 20)
}
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
info(`Starting tests of plugin ${pluginFile}.`)
if (isNoScenamatica())
await removeScenamatica(serverDir)
await deployPlugin(serverDir, pluginFile)
const cp = createServerProcess(serverDir, executable)
cp.stdout.on("data", async (data: Buffer) => {
await onDataReceived(data.toString("utf8"))
})
}
const removeScenamatica = async (serverDir: string) => {
info("Removing Scenamatica from server...")
const pluginDir = path.join(serverDir, "plugins")
const files = await fs.promises.readdir(pluginDir)
for (const file of files) {
if (file.includes("Scenamatica") && file.endsWith(".jar")) {
info(`Removing ${file}...`)
await fs.promises.rm(path.join(pluginDir, file))
}
}
}
export const endTests = async (succeed: boolean) => {
info("Ending tests, shutting down server...")
kill()
stopServer()
await
|
printFooter()
let code: number
if (succeed) {
|
info("Tests succeeded")
code = 0
} else {
setFailed("Tests failed")
code = 1
}
process.exit(code)
}
|
src/server/controller.ts
|
TeamKun-scenamatica-action-6f66283
|
[
{
"filename": "src/server/client.ts",
"retrieved_chunk": " }\n case \"end\": {\n const sessionEnd = packet as PacketSessionEnd\n logSessionEnd(sessionEnd)\n await publishSessionEnd(sessionEnd)\n if (prInfo)\n await publishPRComment(prInfo)\n await endTests(isTestSucceed(sessionEnd.results))\n break\n }",
"score": 0.8331459760665894
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " }\n}\nexport const logSessionStart = (startedAt: number, tests: number): void => {\n info(\"--------------------------------------\")\n info(\" T E S T S\")\n info(\"--------------------------------------\")\n info(`The session is started at ${startedAt}, ${tests} tests are marked to be run.`)\n}\nexport const logSessionEnd = (sessionEnd: PacketSessionEnd): void => {\n const elapsed = `${Math.ceil((sessionEnd.finishedAt - sessionEnd.startedAt) / 1000)} sec`",
"score": 0.8217377662658691
},
{
"filename": "src/outputs/summary.ts",
"retrieved_chunk": "const printSummary = async (sessionEnd: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = sessionEnd\n summary.addRaw(getHeader(false))\n summary.addRaw(getTestSummary(results, startedAt, finishedAt))\n summary.addRaw(getTestResultTable(results))\n await summary.write()\n}\nlet errorHeaderPrinted = false\nlet errorReportingMessagePrinted = false\nconst printErrorSummary = async (errorType: string, errorMessage: string, errorStackTrace: string[]) => {",
"score": 0.8141056299209595
},
{
"filename": "src/outputs/pull-request/appender.ts",
"retrieved_chunk": "}\nexport const reportSessionEnd = (packet: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = packet\n appendHeaderIfNotPrinted()\n outMessage += `${getTestSummary(results, startedAt, finishedAt)}\n ${getTestResultTable(results, true)}\n `\n}\nconst appendHeaderIfNotPrinted = () => {\n if (!headerPrinted) {",
"score": 0.8099034428596497
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " const skipped = results.filter((t) => t.cause === TestResultCause.SKIPPED).length\n info(`\\nResults:\\n`)\n info(`Tests run: ${total}, Failures: ${failures}, Skipped: ${skipped}, Time elapsed: ${elapsed}\\n`)\n}",
"score": 0.8057901263237
}
] |
typescript
|
printFooter()
let code: number
if (succeed) {
|
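stopServer above pairs a polite stop command with a SIGKILL fallback after a 20-second grace period. The same pattern extracted into a reusable helper (a sketch only), with the timer cleared once the process actually exits so the fallback cannot fire late:

import type { ChildProcess } from 'node:child_process';

function stopGracefully(cp: ChildProcess, requestStop: () => void, graceMs = 20_000) {
  requestStop(); // e.g. write "stop\n" to the server's stdin
  const timer = setTimeout(() => {
    if (!cp.killed) cp.kill('SIGKILL');
  }, graceMs);
  cp.once('exit', () => clearTimeout(timer));
}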
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
let serverProcess: ChildProcess | undefined
let serverStdin: Writable | undefined
const genArgs = (executable: string, args: string[]) => {
return [
...args,
"-jar",
executable,
"nogui"
]
}
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
const cp = spawn(
"java",
genArgs(executable, args),
{
cwd: workDir
}
)
serverStdin = cp.stdin
serverProcess = cp
return cp
}
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
const cp = createServerProcess(workDir, executable, args)
cp.stdout.on("data", (data: Buffer) => {
const line = data.toString("utf8")
if (line.includes("Done") && line.includes("For help, type \"help\""))
serverStdin?.write("stop\n")
if (line.endsWith("\n"))
info(line.slice(0, -1))
else
info(line)
})
return new Promise<number>((resolve, reject) => {
cp.on("exit", (code) => {
if (code === 0)
resolve(code)
else
reject(code)
})
})
}
export const stopServer = () => {
if (!serverStdin || !serverProcess)
return
info("Stopping server...")
serverStdin.write("stop\n")
setTimeout(() => {
if (serverProcess!.killed)
return
warning("Server didn't stop in time, killing it...")
serverProcess?.kill("SIGKILL")
}, 1000 * 20)
}
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
info(`Starting tests of plugin ${pluginFile}.`)
if (isNoScenamatica())
await removeScenamatica(serverDir)
await deployPlugin(serverDir, pluginFile)
const cp = createServerProcess(serverDir, executable)
cp.stdout.on("data", async (data: Buffer) => {
await onDataReceived(data.toString("utf8"))
})
}
const removeScenamatica = async (serverDir: string) => {
info("Removing Scenamatica from server...")
const pluginDir = path.join(serverDir, "plugins")
const files = await fs.promises.readdir(pluginDir)
for (const file of files) {
if (file.includes("Scenamatica") && file.endsWith(".jar")) {
info(`Removing ${file}...`)
await fs.promises.rm(path.join(pluginDir, file))
}
}
}
export const endTests = async (succeed: boolean) => {
info("Ending tests, shutting down server...")
kill()
stopServer()
|
await printFooter()
let code: number
if (succeed) {
|
info("Tests succeeded")
code = 0
} else {
setFailed("Tests failed")
code = 1
}
process.exit(code)
}
|
src/server/controller.ts
|
TeamKun-scenamatica-action-6f66283
|
[
{
"filename": "src/server/client.ts",
"retrieved_chunk": " }\n case \"end\": {\n const sessionEnd = packet as PacketSessionEnd\n logSessionEnd(sessionEnd)\n await publishSessionEnd(sessionEnd)\n if (prInfo)\n await publishPRComment(prInfo)\n await endTests(isTestSucceed(sessionEnd.results))\n break\n }",
"score": 0.8374651670455933
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " }\n}\nexport const logSessionStart = (startedAt: number, tests: number): void => {\n info(\"--------------------------------------\")\n info(\" T E S T S\")\n info(\"--------------------------------------\")\n info(`The session is started at ${startedAt}, ${tests} tests are marked to be run.`)\n}\nexport const logSessionEnd = (sessionEnd: PacketSessionEnd): void => {\n const elapsed = `${Math.ceil((sessionEnd.finishedAt - sessionEnd.startedAt) / 1000)} sec`",
"score": 0.8269726037979126
},
{
"filename": "src/outputs/summary.ts",
"retrieved_chunk": "const printSummary = async (sessionEnd: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = sessionEnd\n summary.addRaw(getHeader(false))\n summary.addRaw(getTestSummary(results, startedAt, finishedAt))\n summary.addRaw(getTestResultTable(results))\n await summary.write()\n}\nlet errorHeaderPrinted = false\nlet errorReportingMessagePrinted = false\nconst printErrorSummary = async (errorType: string, errorMessage: string, errorStackTrace: string[]) => {",
"score": 0.819865882396698
},
{
"filename": "src/outputs/pull-request/appender.ts",
"retrieved_chunk": "}\nexport const reportSessionEnd = (packet: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = packet\n appendHeaderIfNotPrinted()\n outMessage += `${getTestSummary(results, startedAt, finishedAt)}\n ${getTestResultTable(results, true)}\n `\n}\nconst appendHeaderIfNotPrinted = () => {\n if (!headerPrinted) {",
"score": 0.8159776329994202
},
{
"filename": "src/outputs/summary.ts",
"retrieved_chunk": "}\nconst printFooter = async () => {\n summary.addRaw(getFooter())\n await summary.write()\n}\nexport { printSummary, printErrorSummary, printFooter }",
"score": 0.8103114366531372
}
] |
typescript
|
await printFooter()
let code: number
if (succeed) {
|
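endTests above mixes reporting with the exit-code decision. A hedged refactor sketch that isolates the decision so it can be unit-tested, keeping the same @actions/core calls:

import { info, setFailed } from '@actions/core';

function resolveExitCode(succeed: boolean): number {
  if (succeed) {
    info('Tests succeeded');
    return 0;
  }
  setFailed('Tests failed');
  return 1;
}

// endTests could then finish with: process.exit(resolveExitCode(succeed))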
import {extractTestResults, getArguments} from "../utils";
import type {PacketTestEnd} from "../packets";
import {getEmojiForCause} from "../logging";
const MESSAGES_PASSED = [
":tada: Congrats! All tests passed! :star2:",
":raised_hands: High-five! You nailed all the tests! :tada::tada:",
":confetti_ball: Hooray! Everything's working perfectly! :tada::confetti_ball:",
":100: Perfect score! All tests passed with flying colors! :rainbow::clap:",
":thumbsup: Great job! All tests passed without a hitch! :rocket::star2:",
":metal: Rock on! All tests passed flawlessly! :guitar::metal:",
":partying_face: Celebrate good times! All tests passed with flying colors! :tada::confetti_ball::balloon:",
":muscle: You crushed it! All tests passed with ease! :fire::muscle:",
":1st_place_medal: Gold medal performance! All tests passed with flying colors! :1st_place_medal::star2:",
":champagne: Pop the champagne! All tests passed, time to celebrate! :champagne::tada:"
];
const MESSAGES_NO_TESTS = [
"Alright, who forgot to write tests? :face_with_raised_eyebrow:",
"No tests? Time to break out the crystal ball. :crystal_ball:",
"Tests? Who writes tests? :person_shrugging:",
"No tests found. Did they run away? :man_running: :woman_running:",
"No tests, no glory. :trophy:",
"Tests? We don't need no stinkin' tests! :shushing_face:",
"No tests? I guess we'll just have to wing it. :eagle:",
"You get a test, and you get a test! Everybody gets a test! :gift: :tada:",
"No tests? That's impossible! :dizzy_face:",
"Tests make the code go round. :carousel_horse:"
];
const MESSAGES_FAILED = [
"Oops! Something went wrong! :scream_cat:",
"Oh no! The tests have betrayed us! :scream:",
"Houston, we have a problem. :rocket:",
"Looks like we have some debugging to do. :beetle:",
"Failures? More like opportunities to improve! :muscle:",
"This is not the result we were looking for. :confused:",
"Looks like we need to rethink our strategy. :thinking:",
"Don't worry, we'll get 'em next time! :sunglasses:",
"Keep calm and debug on. :female_detective:",
"The only way is up from here! :rocket:"
];
const MESSAGES_PASSED_WITH_THRESHOLD = [
"Tests passed, but some are being rebellious. Debug mode: ON! :microscope:",
"Almost there! Some tests failed, but hey, progress is progress! :turtle:",
"Good news: most tests passed. Bad news: a few had different plans. Let's fix 'em! :hammer:",
"We're on the right track, but some tests are playing hard to get. Challenge accepted! :muscle:",
"Tests went well overall, but we have a few stubborn failures. Time for some gentle persuasion! :wrench:",
"Success with a side of failures. It's like a bittersweet symphony. Let's sweeten it up! :musical_note:",
"We're soaring high, but some tests got left behind. Time to reel them back in! :fishing_pole_and_fish:",
"Great progress, but we've got some test gremlins causing trouble. Let's send them packing! :imp:",
"Victory is ours, with a sprinkle of defeat. Let's conquer those pesky failures! :crossed_swords:",
"We're almost there, but a few tests are being rebellious. Let's bring them back to the flock! :sheep:"
];
const REPORT_URL = "https://github.com/TeamKun/Scenamatica/issues/new?assignees=PeyaPeyaPeyang&labels=Type%3A+Bug&projects=&template=bug_report.yml&title=%E3%80%90%E3%83%90%E3%82%B0%E3%80%91"
export const getHeader = (isError: boolean) => {
const result = [ wrap("h1", "Scenamatica"), wrap("h2", "Summary"), "<hr />"]
if (isError) {
result.push(
wrap("h4", ":no_entry: ERROR!!"),
wrap("p", "An unexpected error occurred while running the server and Scenamatica daemon."),
wrap("h2", "Error details")
)
}
return joinLine(...result)
}
export const getRunningMessage = () => {
const messages = [
wrap("h4", ":hourglass_flowing_sand: Hey there! :wave: We're currently testing your plugin."),
wrap("p", "The testing process may take some time, but we'll update this message once it's complete.")
]
return joinLine(...messages)
}
|
export const getTestSummary = (results: PacketTestEnd[], startedAt: number, finishedAt: number) => {
|
const elapsed = (finishedAt - startedAt) / 1000
const {
total,
passed,
failures,
skipped,
cancelled
} = extractTestResults(results)
return joinLine(
getSummaryHeader(total, elapsed, passed, failures, skipped, cancelled),
"<hr />",
wrap("h2", "Details")
)
}
export const getTestResultTable = (results: PacketTestEnd[], minimize = false) => {
const header = wrap("thead", joinLine(
wrap("tr", joinLine(
wrap("th", " "),
wrap("th", "Test"),
wrap("th", "Cause"),
wrap("th", "State"),
wrap("th", "Started at"),
wrap("th", "Finished at"),
wrap("th", "Elapsed"),
wrap("th", "Test description")
))
)
)
const body = wrap("tbody", joinLine(...results.map((result) => {
const {
cause,
state,
scenario,
startedAt,
finishedAt
} = result
const emoji = getEmojiForCause(cause)
const { name } = scenario
const startedAtStr = new Date(startedAt).toLocaleString()
const finishedAtStr = new Date(finishedAt).toLocaleString()
const testElapsed = `${Math.ceil((finishedAt - startedAt) / 1000)} sec`
const description = scenario.description || "No description"
return wrap("tr", joinLine(
wrap("td", emoji),
wrap("td", name),
wrap("td", cause),
wrap("td", state),
wrap("td", startedAtStr),
wrap("td", finishedAtStr),
wrap("td", testElapsed),
wrap("td", description)
))
}))
)
const table = wrap("table", joinLine(header, body))
if (minimize)
return wrap("details", joinLine(
wrap("summary", "Full test results"),
table
))
return table
}
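// Chooses a randomized headline matching the overall outcome and appends the
// run statistics.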
const getSummaryHeader = (total: number, elapsed: number, passed: number, failures: number, skipped: number, cancelled: number) => {
const threshold = getArguments().failThreshold
    let messageSource: string[]
    // Pick the headline pool: no tests at all, a clean pass, failures within the
    // configured threshold, or an outright failure. Checking total === 0 first
    // prevents an empty run from being celebrated as "all tests passed".
    if (total === 0) messageSource = MESSAGES_NO_TESTS
    else if (total === passed + skipped) messageSource = MESSAGES_PASSED
    else if (failures <= threshold) messageSource = MESSAGES_PASSED_WITH_THRESHOLD
    else messageSource = MESSAGES_FAILED
const summaryText = messageSource[Math.floor(Math.random() * messageSource.length)]
return joinLine(
wrap("h4", summaryText),
"<br />",
wrap("p", join(", ",
`Tests run: ${total}`,
`Failures: ${failures}`,
`Skipped: ${skipped}`,
`Cancelled: ${cancelled}`,
`Time elapsed: ${elapsed} sec`
))
)
}
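// Formats a daemon exception and its stack trace as a preformatted code block.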
export const getExceptionString = (errorType: string, errorMessage: string, errorStackTrace: string[]) => {
return wrap("pre", wrap("code", joinLine(
"An unexpected error has occurred while running Scenamatica daemon:",
`${errorType}: ${errorMessage}`,
...errorStackTrace.map((s) => ` at ${s}`)
)
))
}
export const getReportingMessage = () => {
return joinLine(
wrap("h2", "Reporting bugs"),
wrap("p", combine(
"If you believe this is a bug, please report it to ",
wrap("a", "Scenamatica", { href: REPORT_URL }),
" along with the contents of this error message, the above stack trace, and the environment information listed below."
)),
getEnvInfoMessage()
)
}
export const getFooter = () => {
return joinLine(
"<hr />",
getLicenseMessage()
)
}
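// Collapsible listing of the Scenamatica, Minecraft, Java, Node.js and runner
// environment used for the run.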
const getEnvInfoMessage = () => {
const runArgs = getArguments()
const envInfo = [
"+ Versions:",
` - Scenamatica: ${runArgs.scenamaticaVersion}`,
` - Minecraft: ${runArgs.mcVersion}`,
` - Java: ${runArgs.javaVersion}`,
` - Node.js: ${process.version}`,
"+ Runner:",
` - OS: ${process.platform}`,
` - Arch: ${process.arch}`,
]
return wrap("details", joinLine(
wrap("summary", "Environment Information"),
wrap("pre", wrap("code", envInfo.join("\n")))
))
}
const getLicenseMessage = () => {
return joinLine(
wrap("h2" , "License"),
wrap("small", `This test report has been generated by ${
wrap("a", "Scenamatica", { href: "https://github.com/TeamKUN/Scenamatica" })
} and licensed under ${
wrap("a", "MIT License", { href: "https://github.com/TeamKUN/Scenamatica/blob/main/LICENSE" })
}.`),
"<br />",
wrap("small", "You can redistribute it and/or modify it under the terms of the MIT License.")
)
}
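// Minimal HTML helpers used to compose the report markup.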
const wrap = (tag: string, text: string, props: { [key: string]: string } = {}) => {
    const attributes = Object.entries(props).map(([key, value]) => `${key}="${value}"`).join(" ")
    // Only emit the attribute list when it is non-empty, avoiding output like "<h1 >".
    return attributes ? `<${tag} ${attributes}>${text}</${tag}>` : `<${tag}>${text}</${tag}>`
}
const joinLine = (...texts: string[]) => {
return texts.join("\n")
}
const join = (delimiter: string, ...texts: string[]) => {
return texts.join(delimiter)
}
const combine = (...texts: string[]) => {
return texts.join("")
}
|
src/outputs/messages.ts
|
TeamKun-scenamatica-action-6f66283
|
[
{
"filename": "src/outputs/summary.ts",
"retrieved_chunk": "import type {PacketSessionEnd} from \"../packets\"\nimport {summary} from \"@actions/core\";\nimport {\n getExceptionString,\n getFooter,\n getHeader,\n getReportingMessage,\n getTestResultTable,\n getTestSummary\n} from \"./messages\";",
"score": 0.8443947434425354
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " }\n}\nexport const logSessionStart = (startedAt: number, tests: number): void => {\n info(\"--------------------------------------\")\n info(\" T E S T S\")\n info(\"--------------------------------------\")\n info(`The session is started at ${startedAt}, ${tests} tests are marked to be run.`)\n}\nexport const logSessionEnd = (sessionEnd: PacketSessionEnd): void => {\n const elapsed = `${Math.ceil((sessionEnd.finishedAt - sessionEnd.startedAt) / 1000)} sec`",
"score": 0.8435865640640259
},
{
"filename": "src/outputs/summary.ts",
"retrieved_chunk": "const printSummary = async (sessionEnd: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = sessionEnd\n summary.addRaw(getHeader(false))\n summary.addRaw(getTestSummary(results, startedAt, finishedAt))\n summary.addRaw(getTestResultTable(results))\n await summary.write()\n}\nlet errorHeaderPrinted = false\nlet errorReportingMessagePrinted = false\nconst printErrorSummary = async (errorType: string, errorMessage: string, errorStackTrace: string[]) => {",
"score": 0.8388567566871643
},
{
"filename": "src/outputs/pull-request/appender.ts",
"retrieved_chunk": "}\nexport const reportSessionEnd = (packet: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = packet\n appendHeaderIfNotPrinted()\n outMessage += `${getTestSummary(results, startedAt, finishedAt)}\n ${getTestResultTable(results, true)}\n `\n}\nconst appendHeaderIfNotPrinted = () => {\n if (!headerPrinted) {",
"score": 0.8374497890472412
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " const skipped = results.filter((t) => t.cause === TestResultCause.SKIPPED).length\n info(`\\nResults:\\n`)\n info(`Tests run: ${total}, Failures: ${failures}, Skipped: ${skipped}, Time elapsed: ${elapsed}\\n`)\n}",
"score": 0.8336014151573181
}
] |
typescript
|
export const getTestSummary = (results: PacketTestEnd[], startedAt: number, finishedAt: number) => {
|
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
  }, []);
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<
|
GitHubIcon />{" "}
|
<span className="ms-3">
{" "}
                  Sign in with GitHub to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
|
src/components/ModelCard.tsx
|
fal-ai-edit-anything-app-4e32d65
|
[
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " >\n fal-serverless\n </a>\n </span>\n </div>\n <div className=\"flex\">\n <a\n href=\"https://github.com/fal-ai/edit-anything-app\"\n target=\"_blank\"\n className=\"opacity-40 hover:opacity-70 dark:opacity-60 dark:hover:opacity-90 transition-opacity duration-200 pe-2 md:pe-0\"",
"score": 0.8781052231788635
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": " width=\"24\"\n height=\"24\"\n viewBox=\"0 0 24 24\"\n className=\"fill-current\"\n >\n <path d=\"M24 4.557c-.883.392-1.832.656-2.828.775 1.017-.609 1.798-1.574 2.165-2.724-.951.564-2.005.974-3.127 1.195-.897-.957-2.178-1.555-3.594-1.555-3.179 0-5.515 2.966-4.797 6.045-4.091-.205-7.719-2.165-10.148-5.144-1.29 2.213-.669 5.108 1.523 6.574-.806-.026-1.566-.247-2.229-.616-.054 2.281 1.581 4.415 3.949 4.89-.693.188-1.452.232-2.224.084.626 1.956 2.444 3.379 4.6 3.419-2.07 1.623-4.678 2.348-7.29 2.04 2.179 1.397 4.768 2.212 7.548 2.212 9.142 0 14.307-7.721 13.995-14.646.962-.695 1.797-1.562 2.457-2.549z\"></path>\n </svg>\n </a>\n <a\n href=\"https://github.com/fal-ai/edit-anything-app\"",
"score": 0.8437279462814331
},
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": "import GitHubIcon from \"@/components/GitHubIcon\";\nexport default function NavBar() {\n return (\n <div className=\"navbar bg-base-300\">\n <div className=\"container mx-auto\">\n <div className=\"flex-1 max-md:flex-col\">\n <a\n className=\"text-lg normal-case tracking-wide ps-2 md:ps-0\"\n href=\"#\"\n >",
"score": 0.8402563333511353
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": " className=\"opacity-40 hover:opacity-70\"\n target=\"_blank\"\n >\n <GitHubIcon />\n </a>\n </div>\n </footer>\n );\n}",
"score": 0.8401263356208801
},
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " >\n <GitHubIcon />\n </a>\n </div>\n </div>\n </div>\n );\n}",
"score": 0.8391618728637695
}
] |
typescript
|
GitHubIcon />{" "}
|
import NextImage from "next/image";
import Card from "./Card";
import EmptyMessage from "./EmptyMessage";
interface StableDiffusionButtonGroupProps {
setActiveTab: (tab: string) => void;
activeTab: string;
}
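// Tab-style button group that switches between the Replace / Remove / Fill modes.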
export const StableDiffusionOptionsButtonGroup = (
props: StableDiffusionButtonGroupProps
) => {
const { setActiveTab, activeTab } = props;
  const tabClass = (tabName: string) =>
    activeTab === tabName ? "btn-primary" : "";
return (
<div className="max-md:px-2 flex container mx-auto pt-8 w-full">
<div className="join">
<button
onClick={() => setActiveTab("replace")}
className={`btn ${tabClass("replace")} join-item`}
>
Replace
</button>
<button
onClick={() => setActiveTab("remove")}
className={`btn ${tabClass("remove")} join-item`}
>
Remove
</button>
<button
onClick={() => setActiveTab("fill")}
className={`btn ${tabClass("fill")} join-item`}
>
Fill
</button>
</div>
</div>
);
};
interface StableDiffusionInputProps {
setActiveTab: (tab: string) => void;
activeTab: string;
setPrompt: (prompt: string) => void;
setFillPrompt: (prompt: string) => void;
prompt: string;
fillPrompt: string;
isLoading: boolean;
selectedMask: string | null;
hasPrompt: boolean | string;
hasFillPrompt: boolean | string;
handleReplace: () => void;
handleRemove: () => void;
handleFill: () => void;
replacedImageUrls: string[];
removedImageUrls: string[];
filledImageUrls: string[];
}
export const StableDiffusionInput = (props: StableDiffusionInputProps) => {
const {
activeTab,
setActiveTab,
setPrompt,
prompt,
fillPrompt,
hasFillPrompt,
isLoading,
handleReplace,
handleRemove,
handleFill,
setFillPrompt,
selectedMask,
hasPrompt,
replacedImageUrls,
removedImageUrls,
filledImageUrls,
} = props;
return (
<div>
<StableDiffusionOptionsButtonGroup
activeTab={activeTab}
setActiveTab={setActiveTab}
/>
{activeTab === "replace" && (
<div className="container mx-auto pt-8 w-full">
<Card title="Replace...">
<div className="flex flex-col md:flex-row md:space-x-6">
<div className="form-control w-full md:w-3/5 max-w-full">
<label>
<input
id="prompt_input"
type="text"
name="prompt"
value={prompt}
onChange={(e) => setPrompt(e.target.value)}
placeholder="something creative, like 'a bus on the moon'"
className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
disabled={isLoading}
/>
</label>
</div>
<button
className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
disabled={isLoading || !selectedMask || !hasPrompt}
onClick={handleReplace}
>
{selectedMask ? "Generate" : "Pick one of the mask options"}
</button>
</div>
{replacedImageUrls.length === 0 && (
<div className="my-12">
|
<EmptyMessage message="Nothing to see just yet" />
</div>
)}
|
<div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
{replacedImageUrls.map((url, index) => (
<NextImage
key={index}
src={url}
alt={`Generated Image ${index + 1}`}
width={0}
height={0}
sizes="100vw"
style={{ width: "100%", height: "auto" }}
className="my-0"
/>
))}
</div>
</Card>
</div>
)}
{activeTab === "remove" && (
<div className="container mx-auto pt-8 w-full">
<Card title="Remove...">
<div className="flex flex-col md:flex-row md:space-x-6">
<button
className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
disabled={isLoading || !selectedMask}
onClick={handleRemove}
>
{selectedMask ? "Remove" : "Pick one of the mask options"}
</button>
</div>
{removedImageUrls.length === 0 && (
<div className="my-12">
<EmptyMessage message="Nothing to see just yet" />
</div>
)}
<div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
{removedImageUrls.map((url, index) => (
<NextImage
key={index}
src={url}
alt={`Generated Image ${index + 1}`}
width={0}
height={0}
sizes="100vw"
style={{ width: "100%", height: "auto" }}
className="my-0"
/>
))}
</div>
</Card>
</div>
)}
{activeTab === "fill" && (
<div className="container mx-auto pt-8 w-full">
<Card title="Fill...">
<div className="flex flex-col md:flex-row md:space-x-6">
<div className="form-control w-full md:w-3/5 max-w-full">
<label>
<input
id="fill_prompt_input"
type="text"
name="fill_prompt"
value={fillPrompt}
onChange={(e) => setFillPrompt(e.target.value)}
placeholder="something creative, like 'an alien'"
className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
disabled={isLoading}
/>
</label>
</div>
<button
className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
disabled={isLoading || !selectedMask || !hasFillPrompt}
onClick={handleFill}
>
{selectedMask ? "Fill" : "Pick one of the mask options"}
</button>
</div>
{filledImageUrls.length === 0 && (
<div className="my-12">
<EmptyMessage message="Nothing to see just yet" />
</div>
)}
<div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
{filledImageUrls.map((url, index) => (
<NextImage
key={index}
src={url}
alt={`Generated Image ${index + 1}`}
width={0}
height={0}
sizes="100vw"
style={{ width: "100%", height: "auto" }}
className="my-0"
/>
))}
</div>
</Card>
</div>
)}
</div>
);
};
|
src/components/StableDiffusion.tsx
|
fal-ai-edit-anything-app-4e32d65
|
[
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " handleReplace={handleReplace}\n handleRemove={handleRemove}\n handleFill={handleFill}\n setFillPrompt={setFillPrompt}\n selectedMask={selectedMask}\n hasPrompt={hasPrompt}\n replacedImageUrls={replacedImageUrls}\n removedImageUrls={removedImageUrls}\n filledImageUrls={filledImageUrls}\n />",
"score": 0.8924597501754761
},
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " </div>\n <div className=\"flex flex-col items-center\">\n <button\n className=\"btn btn-primary max-sm:btn-wide mb-4 md:mb-0\"\n disabled={isLoading || !selectedImage || !position}\n onClick={generateMasks}\n >\n {position ? \"Generate masks\" : \"Set the mask reference point\"}\n </button>\n </div>",
"score": 0.8919060230255127
},
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " <button\n className=\"btn btn-primary max-sm:btn-wide mb-4 md:mb-0\"\n disabled={isLoading || !selectedImage || !position}\n onClick={generateMasks}\n >\n {position ? \"Regenerate\" : \"Set the mask reference point\"}\n </button>\n </>\n )}\n </Card>",
"score": 0.8822870254516602
},
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " </div>\n )}\n {displayMasks.length > 0 && (\n <>\n {props.selectedModel.id === \"sam\" && (\n <span className=\"font-light mb-0 inline-block opacity-70\">\n <strong>Hint:</strong> click on the image select a mask\n </span>\n )}\n <div className=\"grid grid-cols-1 space-y-2\">",
"score": 0.8787862062454224
},
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " {displayMasks.map((mask, index) => (\n <ImageMask\n key={index}\n alt={`Mask ${index}`}\n mask={mask}\n selected={selectedMask === mask}\n onClick={handleMaskSelected}\n />\n ))}\n </div>",
"score": 0.8715493679046631
}
] |
typescript
|
<EmptyMessage message="Nothing to see just yet" />
</div>
)}
|
import { ImageFile } from "@/data/image";
import { Model } from "@/data/modelMetadata";
import { PropsWithChildren } from "react";
import Card from "./Card";
import EmptyMessage from "./EmptyMessage";
import ImageMask from "./ImageMask";
export interface MaskPickerProps {
displayMasks: string[];
masks: string[];
dilation: number;
isLoading: boolean;
setDilation: (dilation: number) => void;
selectedImage: ImageFile | null;
position: { x: number; y: number } | null;
generateMasks: () => void;
selectedMask: string | null;
handleMaskSelected: (mask: string) => void;
selectedModel: Model;
}
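// Lets the user tune the mask dilation, trigger mask generation, and pick one
// of the generated masks.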
export default function MaskPicker(props: PropsWithChildren<MaskPickerProps>) {
const {
displayMasks,
masks,
dilation,
isLoading,
setDilation,
selectedImage,
position,
generateMasks,
selectedMask,
handleMaskSelected,
} = props;
return (
<Card title="Masks" classNames="min-h-full">
<label>
Dilation:
<input
id="mask_dilation"
type="number"
name="dilation"
value={dilation}
          onChange={(e) => setDilation(parseInt(e.target.value, 10) || 0)}
className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
disabled={isLoading}
/>
</label>
{displayMasks.length === 0 && (
<div className="items-center mt-0 md:mt-12">
<div className="hidden md:display">
<EmptyMessage message="No masks generated yet" />
</div>
<div className="flex flex-col items-center">
<button
className="btn btn-primary max-sm:btn-wide mb-4 md:mb-0"
disabled={isLoading || !selectedImage || !position}
onClick={generateMasks}
>
{position ? "Generate masks" : "Set the mask reference point"}
</button>
</div>
</div>
)}
{displayMasks.length > 0 && (
<>
{props.selectedModel.id === "sam" && (
<span className="font-light mb-0 inline-block opacity-70">
              <strong>Hint:</strong> click on the image to select a mask
</span>
)}
<div className="grid grid-cols-1 space-y-2">
{displayMasks.map((mask, index) => (
<
|
ImageMask
key={index}
|
alt={`Mask ${index}`}
mask={mask}
selected={selectedMask === mask}
onClick={handleMaskSelected}
/>
))}
</div>
<button
className="btn btn-primary max-sm:btn-wide mb-4 md:mb-0"
disabled={isLoading || !selectedImage || !position}
onClick={generateMasks}
>
{position ? "Regenerate" : "Set the mask reference point"}
</button>
</>
)}
</Card>
);
}
|
src/components/MaskPicker.tsx
|
fal-ai-edit-anything-app-4e32d65
|
[
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " singleImageResultUrl={singleImageResultUrl}\n />\n )}\n {selectedModel.id === \"sam\" && (\n <MaskPicker\n selectedModel={selectedModel}\n displayMasks={displayMasks}\n masks={masks}\n dilation={dilation}\n isLoading={isLoading}",
"score": 0.8811171054840088
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <ImageSelector\n onImageSelect={handleImageSelected}\n disabled={isLoading}\n />\n )}\n {selectedImage && (\n <>\n <div className=\"flex justify-between\">\n {selectedModel.id === \"sam\" && (\n <span className=\"font-light mb-0 inline-block opacity-70\">",
"score": 0.8585492372512817
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " setDilation={setDilation}\n selectedImage={selectedImage}\n position={position}\n generateMasks={generateMasks}\n selectedMask={selectedDisplayMask}\n handleMaskSelected={handleMaskSelected}\n />\n )}\n </div>\n </div>",
"score": 0.8526250123977661
},
{
"filename": "src/components/ImageMask.tsx",
"retrieved_chunk": "import NextImage from \"next/image\";\nimport { useCallback } from \"react\";\nexport interface ImageMaskProps {\n mask: string;\n alt: string;\n selected: boolean;\n onClick: (mask: string) => void;\n}\nexport default function ImageMask(props: ImageMaskProps) {\n const borderClasses = [",
"score": 0.8455899357795715
},
{
"filename": "src/components/ImageMask.tsx",
"retrieved_chunk": " const handleMaskClick = useCallback(() => {\n onClick(mask);\n }, [mask, onClick]);\n return (\n <div\n className={`border-2 p-2 ${\n selected ? selectedBorderClasses : borderClasses\n }`}\n onClick={handleMaskClick}\n >",
"score": 0.8436691761016846
}
] |
typescript
|
ImageMask
key={index}
|
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
  }, []);
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
|
return model.curlCode;
|
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
                  Sign in with GitHub to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
|
src/components/ModelCard.tsx
|
fal-ai-edit-anything-app-4e32d65
|
[
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " const [imageCount, setImageCount] = useState(0);\n const [dilation, setDilation] = useState(0);\n const [activeTab, setActiveTab] = useState(\"replace\");\n const [selectedModel, setSelectedModel] = useState<Model>(models[\"sam\"]);\n const [singleImageResultUrl, setSingleImageResultUrl] = useState<\n string | null\n >(null);\n const [scribblePaused, setScriblePaused] = useState(false);\n const [showModelDetails, setShowModelDetails] = useState(false);\n const reset = () => {",
"score": 0.8310354948043823
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <div>\n {selectedModel.id === \"sam\" && (\n <StableDiffusionInput\n activeTab={activeTab}\n setActiveTab={setActiveTab}\n setPrompt={setPrompt}\n prompt={prompt}\n fillPrompt={fillPrompt}\n hasFillPrompt={hasFillPrompt}\n isLoading={isLoading}",
"score": 0.8284801840782166
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " onSelect={handleModelSelected}\n selectedModel={selectedModel}\n />\n </div>\n <div className=\"hidden md:flex items-end justify-end\">\n <button\n className=\"btn btn-outline\"\n onClick={() => setShowModelDetails(true)}\n >\n <CodeBracketIcon className=\"h-6 w-6\" />",
"score": 0.8077722787857056
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " />\n {activeTab === \"replace\" && (\n <div className=\"container mx-auto pt-8 w-full\">\n <Card title=\"Replace...\">\n <div className=\"flex flex-col md:flex-row md:space-x-6\">\n <div className=\"form-control w-full md:w-3/5 max-w-full\">\n <label>\n <input\n id=\"prompt_input\"\n type=\"text\"",
"score": 0.806223452091217
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " Show code\n </button>\n </div>\n <div className=\"hidden md:display md:col-span-3\">\n <Card>\n <Steps currentStep={step} />\n </Card>\n </div>\n <div className=\"md:col-span-2\">\n {selectedModel.id === \"controlnet\" && (",
"score": 0.8061652183532715
}
] |
typescript
|
return model.curlCode;
|
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
  }, []);
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
|
<GitHubIcon />{" "}
|
<span className="ms-3">
{" "}
                  Sign in with GitHub to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
|
src/components/ModelCard.tsx
|
fal-ai-edit-anything-app-4e32d65
|
[
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " >\n fal-serverless\n </a>\n </span>\n </div>\n <div className=\"flex\">\n <a\n href=\"https://github.com/fal-ai/edit-anything-app\"\n target=\"_blank\"\n className=\"opacity-40 hover:opacity-70 dark:opacity-60 dark:hover:opacity-90 transition-opacity duration-200 pe-2 md:pe-0\"",
"score": 0.8853480815887451
},
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": "import GitHubIcon from \"@/components/GitHubIcon\";\nexport default function NavBar() {\n return (\n <div className=\"navbar bg-base-300\">\n <div className=\"container mx-auto\">\n <div className=\"flex-1 max-md:flex-col\">\n <a\n className=\"text-lg normal-case tracking-wide ps-2 md:ps-0\"\n href=\"#\"\n >",
"score": 0.8406162261962891
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": " className=\"opacity-40 hover:opacity-70\"\n target=\"_blank\"\n >\n <GitHubIcon />\n </a>\n </div>\n </footer>\n );\n}",
"score": 0.8388741612434387
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": " width=\"24\"\n height=\"24\"\n viewBox=\"0 0 24 24\"\n className=\"fill-current\"\n >\n <path d=\"M24 4.557c-.883.392-1.832.656-2.828.775 1.017-.609 1.798-1.574 2.165-2.724-.951.564-2.005.974-3.127 1.195-.897-.957-2.178-1.555-3.594-1.555-3.179 0-5.515 2.966-4.797 6.045-4.091-.205-7.719-2.165-10.148-5.144-1.29 2.213-.669 5.108 1.523 6.574-.806-.026-1.566-.247-2.229-.616-.054 2.281 1.581 4.415 3.949 4.89-.693.188-1.452.232-2.224.084.626 1.956 2.444 3.379 4.6 3.419-2.07 1.623-4.678 2.348-7.29 2.04 2.179 1.397 4.768 2.212 7.548 2.212 9.142 0 14.307-7.721 13.995-14.646.962-.695 1.797-1.562 2.457-2.549z\"></path>\n </svg>\n </a>\n <a\n href=\"https://github.com/fal-ai/edit-anything-app\"",
"score": 0.8375184535980225
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": " </p>\n </div>\n <div className=\"prose flex\">\n <a\n href=\"https://twitter.com/fal_ai_data\"\n className=\"opacity-40 hover:opacity-70\"\n target=\"_blank\"\n >\n <svg\n xmlns=\"http://www.w3.org/2000/svg\"",
"score": 0.8364298939704895
}
] |
typescript
|
<GitHubIcon />{" "}
|
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
  }, []);
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
|
return model.pythonCode;
|
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
                  Sign in with GitHub to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
|
src/components/ModelCard.tsx
|
fal-ai-edit-anything-app-4e32d65
|
[
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " const [imageCount, setImageCount] = useState(0);\n const [dilation, setDilation] = useState(0);\n const [activeTab, setActiveTab] = useState(\"replace\");\n const [selectedModel, setSelectedModel] = useState<Model>(models[\"sam\"]);\n const [singleImageResultUrl, setSingleImageResultUrl] = useState<\n string | null\n >(null);\n const [scribblePaused, setScriblePaused] = useState(false);\n const [showModelDetails, setShowModelDetails] = useState(false);\n const reset = () => {",
"score": 0.8236502408981323
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <div>\n {selectedModel.id === \"sam\" && (\n <StableDiffusionInput\n activeTab={activeTab}\n setActiveTab={setActiveTab}\n setPrompt={setPrompt}\n prompt={prompt}\n fillPrompt={fillPrompt}\n hasFillPrompt={hasFillPrompt}\n isLoading={isLoading}",
"score": 0.8182836771011353
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " const { setActiveTab, activeTab } = props;\n const tabClass = (tabName: string) =>\n props.activeTab === tabName ? \"btn-primary\" : \"\";\n return (\n <div className=\"max-md:px-2 flex container mx-auto pt-8 w-full\">\n <div className=\"join\">\n <button\n onClick={() => setActiveTab(\"replace\")}\n className={`btn ${tabClass(\"replace\")} join-item`}\n >",
"score": 0.8094766139984131
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " className={`btn ${tabClass(\"fill\")} join-item`}\n >\n Fill\n </button>\n </div>\n </div>\n );\n};\ninterface StableDiffusionInputProps {\n setActiveTab: (tab: string) => void;",
"score": 0.8088778257369995
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " onSelect={handleModelSelected}\n selectedModel={selectedModel}\n />\n </div>\n <div className=\"hidden md:flex items-end justify-end\">\n <button\n className=\"btn btn-outline\"\n onClick={() => setShowModelDetails(true)}\n >\n <CodeBracketIcon className=\"h-6 w-6\" />",
"score": 0.8017737865447998
}
] |
typescript
|
return model.pythonCode;
|
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
  }, []);
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
|
navigator.clipboard.writeText(model.apiEndpoint);
|
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
                  Sign in with GitHub to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
|
src/components/ModelCard.tsx
|
fal-ai-edit-anything-app-4e32d65
|
[
{
"filename": "src/components/ScribbleBox.tsx",
"retrieved_chunk": " const canvasRef: any = React.createRef();\n const { handleScrible, setScriblePaused } = props;\n const onChange = async () => {\n const paths = await canvasRef.current.exportPaths();\n localStorage.setItem(\"paths\", JSON.stringify(paths, null, 2));\n if (!paths.length) return;\n setScriblePaused(true);\n const data = await canvasRef.current.exportImage(\"png\");\n handleScrible(data);\n };",
"score": 0.8140931129455566
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " onSelect={handleModelSelected}\n selectedModel={selectedModel}\n />\n </div>\n <div className=\"hidden md:flex items-end justify-end\">\n <button\n className=\"btn btn-outline\"\n onClick={() => setShowModelDetails(true)}\n >\n <CodeBracketIcon className=\"h-6 w-6\" />",
"score": 0.8106481432914734
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " mask_url: selectedMask,\n image_url: selectedImage,\n prompt,\n };\n await handleAction(\"/api/fill\", body, setFilledImageUrls);\n }\n };\n const handleModelSelected = (modelId: string) => {\n va.track(\"model-selected-\" + modelId);\n setSelectedModel(models[modelId]);",
"score": 0.807212233543396
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " const [imageCount, setImageCount] = useState(0);\n const [dilation, setDilation] = useState(0);\n const [activeTab, setActiveTab] = useState(\"replace\");\n const [selectedModel, setSelectedModel] = useState<Model>(models[\"sam\"]);\n const [singleImageResultUrl, setSingleImageResultUrl] = useState<\n string | null\n >(null);\n const [scribblePaused, setScriblePaused] = useState(false);\n const [showModelDetails, setShowModelDetails] = useState(false);\n const reset = () => {",
"score": 0.8022258281707764
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " const promtValue = promptEl ? promptEl.value : null;\n const response = await fetch(`/api/${selectedModel.id}`, {\n method: \"POST\",\n headers: {\n accept: \"application/json\",\n \"content-type\": \"application/json\",\n },\n body: JSON.stringify({\n base64Image: selectedImage.data,\n prompt: promtValue,",
"score": 0.802204966545105
}
] |
typescript
|
navigator.clipboard.writeText(model.apiEndpoint);
|
import { TObject, TUnion } from '@sinclair/typebox';
import { Value, ValueError } from '@sinclair/typebox/value';
import { TypeCompiler } from '@sinclair/typebox/compiler';
import { AbstractTypedUnionValidator } from './abstract-typed-union-validator';
import {
createErrorsIterable,
createUnionTypeError,
createUnionTypeErrorIterable,
throwInvalidAssert,
throwInvalidValidate,
} from '../lib/error-utils';
export type FindSchemaMemberIndex = (value: unknown) => number | null;
export type SchemaMemberTest = (value: object) => boolean;
/**
* Abstract validatory for typed unions, performing lazy compilation.
*/
export abstract class AbstractCompilingTypedUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractTypedUnionValidator<S> {
#compiledSchemaMemberTests: (SchemaMemberTest | undefined)[];
/** @inheritdoc */
constructor(schema: Readonly<S>) {
super(schema);
this.#compiledSchemaMemberTests = new Array(schema.anyOf.length);
}
/** @inheritdoc */
override test(value: Readonly<unknown>): boolean {
const memberIndex = this.compiledFindSchemaMemberIndex(value);
return this.compiledSchemaMemberTest(memberIndex, value);
}
/** @inheritdoc */
override errors(value: Readonly<unknown>): Iterable<ValueError> {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
return createUnionTypeErrorIterable(indexOrError);
}
return createErrorsIterable(
|
Value.Errors(this.schema.anyOf[indexOrError], value)
);
|
}
protected override assertReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
throwInvalidAssert(overallError, indexOrError);
}
const memberSchema = this.schema.anyOf[indexOrError];
if (!this.compiledSchemaMemberTest(indexOrError, value)) {
throwInvalidAssert(
overallError,
Value.Errors(memberSchema, value).First()!
);
}
return memberSchema;
}
protected override validateReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
throwInvalidValidate(overallError, indexOrError);
}
const memberSchema = this.schema.anyOf[indexOrError];
if (!this.compiledSchemaMemberTest(indexOrError, value)) {
throwInvalidValidate(overallError, Value.Errors(memberSchema, value));
}
return memberSchema;
}
protected compiledFindSchemaMemberIndexOrError(
value: Readonly<unknown>
): number | ValueError {
const memberIndex = this.compiledFindSchemaMemberIndex(value);
if (memberIndex === null) {
return createUnionTypeError(this.schema, value);
}
return memberIndex;
}
protected abstract compiledFindSchemaMemberIndex(
value: Readonly<unknown>
): number | null;
private compiledSchemaMemberTest(
memberIndex: number | null,
value: Readonly<unknown>
): boolean {
if (memberIndex === null) {
return false;
}
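    // Compile the member schema's test on first use and cache it for later calls.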
if (this.#compiledSchemaMemberTests[memberIndex] === undefined) {
let code = TypeCompiler.Compile(this.schema.anyOf[memberIndex]).Code();
code = code.replace(
`(typeof value === 'object' && value !== null && !Array.isArray(value)) &&`,
''
);
// provide some resilience to change in TypeBox compiled code formatting
const startOfFunction = code.indexOf('function');
const startOfReturn = code.indexOf('return', startOfFunction);
code =
'return ' +
code.substring(code.indexOf('(', startOfReturn), code.length - 1);
this.#compiledSchemaMemberTests[memberIndex] = new Function(
'value',
code
) as SchemaMemberTest;
}
return this.#compiledSchemaMemberTests[memberIndex]!(value);
}
}
|
src/abstract/abstract-compiling-typed-union-validator.ts
|
jtlapp-typebox-validators-0a2721a
|
[
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " }\n return Value.Check(this.schema.anyOf[indexOrError], value);\n }\n /** @inheritdoc */\n override errors(value: Readonly<unknown>): Iterable<ValueError> {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n return createUnionTypeErrorIterable(indexOrError);\n }\n const schema = this.schema.anyOf[indexOrError] as TObject;",
"score": 0.944567084312439
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " override test(value: Readonly<unknown>): boolean {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n return false;\n }\n return Value.Check(this.schema.anyOf[indexOrError], value);\n }\n /** @inheritdoc */\n override errors(value: Readonly<unknown>): Iterable<ValueError> {\n const indexOrError = this.findSchemaMemberIndex(value);",
"score": 0.9358895421028137
},
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " return createErrorsIterable(Value.Errors(schema, value));\n }\n override assertReturningSchema(\n value: Readonly<unknown>,\n overallError?: string\n ): TObject {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n throwInvalidAssert(overallError, indexOrError);\n }",
"score": 0.9246691465377808
},
{
"filename": "src/standard/compiling-standard-validator.ts",
"retrieved_chunk": " }\n }\n /** @inheritdoc */\n override errors(value: Readonly<unknown>): Iterable<ValueError> {\n const compiledType = this.getCompiledType();\n return createErrorsIterable(compiledType.Errors(value));\n }\n private getCompiledType(): TypeCheck<S> {\n if (this.#compiledType === undefined) {\n this.#compiledType = TypeCompiler.Compile(this.schema);",
"score": 0.9242093563079834
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " if (typeof indexOrError !== 'number') {\n return createUnionTypeErrorIterable(indexOrError);\n }\n const schema = this.schema.anyOf[indexOrError] as TObject;\n return createErrorsIterable(Value.Errors(schema, value));\n }\n override assertReturningSchema(\n value: Readonly<unknown>,\n overallError?: string\n ): TObject {",
"score": 0.9125511646270752
}
] |
typescript
|
Value.Errors(this.schema.anyOf[indexOrError], value)
);
|
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
|
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
|
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
|
src/server/auth.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.user.update({\n where: {\n id: ctx.session.user.id,\n },\n data: {\n activeCharacterId: input.id,\n },\n });\n }),",
"score": 0.8141169548034668
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " })\n )\n .mutation(({ input, ctx }) => {\n return ctx.prisma.message.create({\n data: {\n content: input.content,\n authorId: ctx.session.user.id,\n },\n });\n }),",
"score": 0.8118855953216553
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " where: {\n authorId: ctx.session.user.id,\n },\n });\n }),\n});",
"score": 0.8111631870269775
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " id: z.string(),\n })\n )\n .mutation(async ({ input, ctx }) => {\n const todo = await ctx.prisma.todo.findFirst({\n where: {\n id: input.id,\n authorId: ctx.session.user.id,\n },\n });",
"score": 0.8106293678283691
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " authorId: ctx.session.user.id,\n },\n });\n if (!todo) {\n throw new Error(\"No such todo\");\n }\n return ctx.prisma.todo.update({\n where: {\n id: input.id,\n },",
"score": 0.7966801524162292
}
] |
typescript
|
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
|
import { Kind, TObject, TUnion } from '@sinclair/typebox';
import {
ValueError,
ValueErrorIterator,
ValueErrorType,
} from '@sinclair/typebox/errors';
import { ValidationException } from './validation-exception';
export const DEFAULT_OVERALL_MESSAGE = 'Invalid value';
export const DEFAULT_UNKNOWN_TYPE_MESSAGE = 'Object type not recognized';
const TYPEBOX_REQUIRED_ERROR_MESSAGE = 'Expected required property';
export function adjustErrorMessage(error: ValueError): ValueError {
if (error.schema.errorMessage !== undefined) {
error.message = error.schema.errorMessage;
}
return error;
}
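// Wraps TypeBox's error iterator, substituting any custom `errorMessage` defined
// on a schema and suppressing redundant 'required' errors for constrained values.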
export function createErrorsIterable(
typeboxErrorIterator: ValueErrorIterator
): Iterable<ValueError> {
return {
[Symbol.iterator]: function* () {
const errors = typeboxErrorIterator[Symbol.iterator]();
let result = errors.next();
let customErrorPath = '???'; // signals no prior path ('' can be root path)
while (result.value !== undefined) {
const error = result.value;
const standardMessage = error.message;
if (error.path !== customErrorPath) {
adjustErrorMessage(error);
if (error.message != standardMessage) {
customErrorPath = error.path;
yield error;
} else if (
// drop 'required' errors for values that have constraints
error.message != TYPEBOX_REQUIRED_ERROR_MESSAGE ||
['Any', 'Unknown'].includes(error.schema[Kind])
) {
yield error;
}
}
result = errors.next();
}
},
};
}
export function createUnionTypeError(
unionSchema: Readonly<TUnion<TObject[]>>,
value: Readonly<unknown>
): ValueError {
return {
type: ValueErrorType.Union,
path: '',
schema: unionSchema,
value,
message: unionSchema.errorMessage ?? DEFAULT_UNKNOWN_TYPE_MESSAGE,
};
}
export function createUnionTypeErrorIterable(
typeError: ValueError
): Iterable<ValueError> {
return {
[Symbol.iterator]: function* () {
yield typeError;
},
};
}
export function throwInvalidAssert(
overallError: string | undefined,
firstError: ValueError
): never {
adjustErrorMessage(firstError);
throw new ValidationException(
overallError === undefined
? DEFAULT_OVERALL_MESSAGE
: overallError.replace(
'{error}',
|
ValidationException.errorToString(firstError)
),
[firstError]
);
|
}
export function throwInvalidValidate(
overallError: string | undefined,
errorOrErrors: ValueError | ValueErrorIterator
): never {
throw new ValidationException(
overallError ?? DEFAULT_OVERALL_MESSAGE,
errorOrErrors instanceof ValueErrorIterator
? [...createErrorsIterable(errorOrErrors)]
: [errorOrErrors]
);
}
|
src/lib/error-utils.ts
|
jtlapp-typebox-validators-0a2721a
|
[
{
"filename": "src/test/test-invalid-specs.ts",
"retrieved_chunk": " expect(details[i]?.message).toContain(error.message);\n });\n expect(e.message).toEqual(\n overallMessage ?? DEFAULT_OVERALL_MESSAGE\n );\n if (spec.validateString !== undefined) {\n expect(e.toString()).toEqual(spec.validateString);\n }\n }\n });",
"score": 0.8154350519180298
},
{
"filename": "src/lib/validation-exception.ts",
"retrieved_chunk": " */\n toString(): string {\n let message = this.message;\n if (this.details.length > 0) {\n if (!message.endsWith(':')) {\n message += ':';\n }\n for (const detail of this.details) {\n message += '\\n * ' + ValidationException.errorToString(detail);\n }",
"score": 0.814246416091919
},
{
"filename": "src/lib/validation-exception.ts",
"retrieved_chunk": " : error.message;\n }\n}",
"score": 0.8102260828018188
},
{
"filename": "src/test/test-invalid-specs.ts",
"retrieved_chunk": " try {\n (validator[method] as any)(spec.value, overallMessage);\n expect(false).toBe(true);\n } catch (e: any) {\n if (!(e instanceof ValidationException)) throw e;\n const details = e.details;\n const errors = spec.errors;\n expect(details.length).toEqual(errors.length);\n errors.forEach((error, i) => {\n expect(details[i]?.path).toEqual(error.path);",
"score": 0.8094829320907593
},
{
"filename": "src/test/test-invalid-specs.ts",
"retrieved_chunk": " expect(false).toBe(true);\n } catch (e: any) {\n if (!(e instanceof ValidationException)) throw e;\n const details = e.details;\n const errors = spec.errors;\n expect(details.length).toEqual(1);\n expect(details[0].path).toEqual(errors[0].path);\n expect(details[0].message).toContain(errors[0].message);\n if (spec.assertMessage !== undefined) {\n expect(e.message).toEqual(spec.assertMessage);",
"score": 0.8073577880859375
}
] |
typescript
|
ValidationException.errorToString(firstError)
),
[firstError]
);
|
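A short usage sketch for the error utilities in the record above, assuming `Value.Check` / `Value.Errors` from `@sinclair/typebox/value`; the schema and value are illustrative:

import { Type } from "@sinclair/typebox";
import { Value } from "@sinclair/typebox/value";
import { createErrorsIterable, throwInvalidValidate } from "./error-utils";

// `errorMessage` is the custom schema option adjustErrorMessage looks for.
const UserSchema = Type.Object({
  name: Type.String({ errorMessage: "name must be a string" }),
  age: Type.Number(),
});

const candidate: unknown = { name: 42 };

if (!Value.Check(UserSchema, candidate)) {
  // Log the adjusted errors (custom messages applied, redundant
  // 'required' errors filtered out) ...
  for (const err of createErrorsIterable(Value.Errors(UserSchema, candidate))) {
    console.log(`${err.path}: ${err.message}`);
  }
  // ... or raise a ValidationException carrying all of them.
  throwInvalidValidate("Invalid user", Value.Errors(UserSchema, candidate));
}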
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
|
from: env.EMAIL_FROM
}),
GoogleProvider({
|
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
|
src/server/auth.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 0.8042411804199219
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 0.8009271025657654
},
{
"filename": "src/server/api/routers/me.ts",
"retrieved_chunk": " });\n }),\n});",
"score": 0.786649763584137
},
{
"filename": "src/pages/api/auth/[...nextauth].ts",
"retrieved_chunk": "import NextAuth from \"next-auth\";\nimport { authOptions } from \"~/server/auth\";\nexport default NextAuth(authOptions);",
"score": 0.7801024913787842
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "import { Configuration, OpenAIApi } from \"openai\";\nimport { env } from \"../../env.mjs\";\nimport { ChatCompletionRequestMessageRoleEnum } from \"openai\";\nimport { Ratelimit } from \"@upstash/ratelimit\";\nimport { Redis } from \"@upstash/redis\";\nimport { ChatGPTTodo } from \"src/external/openai/chatGPTTodo\";\nimport { ChatGPTCharacter, ChatGPTMessage } from \"src/external/openai/chatGPTMessage\";\nimport { parseActionCode, stringifyActionCode } from \"src/external/openai/chatGPTActionItems\";\nconst configuration = new Configuration({\n organization: env.OPENAI_ORGANIZATION,",
"score": 0.7798641920089722
}
] |
typescript
|
from: env.EMAIL_FROM
}),
GoogleProvider({
|
import { TextInput } from "~/components/basic/TextInput";
import { useEffect, useRef, useState } from "react";
import { api } from "~/utils/api";
import { toast } from "react-toastify";
import { Message } from "~/components/chat/Message";
export function ChatBox() {
const [message, setMessage] = useState("");
const context = api.useContext();
const messages = api.message.findAll.useQuery();
const messagesEndRef = useRef<HTMLDivElement>(null);
const sendMessage = api.message.create.useMutation({
onSuccess: () => {
void context.message.invalidate();
setMessage("");
},
onError: (err) => {
toast.error(err.message);
},
});
const requestGPTResponse = api.message.generateGPT.useMutation({
onSuccess: () => {
void context.message.invalidate();
void context.todo.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const deleteMessage = api.message.deleteAll.useMutation({
onSuccess: async () => {
await context.message.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const clearChatHandler = (e: React.MouseEvent<HTMLButtonElement>) => {
e.preventDefault();
void toast.promise(
deleteMessage.mutateAsync(),
{
pending: "Loading...",
}
);
};
const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
e.preventDefault();
void sendMessage.mutateAsync({ content: message }).then(() => {
void toast.promise(requestGPTResponse.mutateAsync(), {
pending: "Thinking...",
});
});
};
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};
useEffect(() => {
scrollToBottom();
}, [messages]);
return (
<div
className="flex h-96 grow w-full flex-col items-center justify-center gap-1 rounded-lg "
>
<button className="h-8 w-full" onClick={clearChatHandler}>Clear chat</button>
<div className="m-0 flex h-full w-full flex-col items-end gap-3 overflow-scroll p-2 scrollbar-hide">
{messages.data?.slice(0).reverse().map((message, index) => (
<
|
Message message={message} key={index} />
))}
|
<div className="h-0 w-0" ref={messagesEndRef} />
</div>
<form className="flex w-full" onSubmit={onSubmit}>
<TextInput placeholder="Message" value={message} setValue={setMessage} />
<button className="h-8 w-20" type="submit">Send</button>
</form>
</div>
);
}
|
src/components/chat/ChatBox.tsx
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " }, [message]);\n return (\n <div className=\"flex w-full flex-row\">\n {!message.isGPT && <div className=\"w-2/6\" />}\n <div\n className={\n \"w-full rounded-2xl bg-white p-2 \" +\n (message.isGPT ? \" rounded-bl-none\" : \"rounded-br-none\")\n }\n >",
"score": 0.8523516654968262
},
{
"filename": "src/components/chat/SelectCharacterBox.tsx",
"retrieved_chunk": " goToChat={props.goToChat}\n />\n ))}\n </div>\n );\n}",
"score": 0.8443681597709656
},
{
"filename": "src/components/chat/SelectPageWrapper.tsx",
"retrieved_chunk": " {selected === \"CHAT\" && <button\n className=\"h-10 w-10\"\n onClick={() => {\n setSelected(\"CHARACTER\");\n }}\n >\n {\"<\"}\n </button>}\n {selected === \"CHAT\" && me?.data?.activeCharacter?.name}\n </div>",
"score": 0.8416634798049927
},
{
"filename": "src/components/chat/SelectPageWrapper.tsx",
"retrieved_chunk": "import { useState } from \"react\";\nimport { ChatBox } from \"~/components/chat/ChatBox\";\nimport { SelectCharacterBox } from \"~/components/chat/SelectCharacterBox\";\nimport { api } from \"~/utils/api\";\nexport function SelectPageWrapper() {\n const [selected, setSelected] = useState<\"CHARACTER\" | \"CHAT\">(\"CHAT\");\n const me = api.me.getMe.useQuery();\n return (\n <div className=\"flex w-full flex-col overflow-clip rounded bg-purple-200\">\n <div className=\"h-10 w-full rounded bg-white\">",
"score": 0.8330236673355103
},
{
"filename": "src/components/chat/SelectCharacterBox.tsx",
"retrieved_chunk": "import { api } from \"~/utils/api\";\nimport { Character } from \"~/components/chat/Character\";\nexport function SelectCharacterBox(props: { goToChat: () => void }) {\n const characters = api.character.findAll.useQuery();\n return (\n <div className=\"ronded flex h-full w-full flex-col items-center gap-3 pl-2 pr-2 pt-3\">\n {characters.data?.map((character, index) => (\n <Character\n character={character}\n key={index}",
"score": 0.8323125839233398
}
] |
typescript
|
Message message={message} key={index} />
))}
|
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
|
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
|
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
|
src/server/auth.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 0.8395671844482422
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 0.8376765847206116
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " ],\n };\n },\n /**\n * Whether tRPC should await queries when server rendering pages.\n *\n * @see https://trpc.io/docs/nextjs#ssr-boolean-default-false\n */\n ssr: false,\n});",
"score": 0.7968204021453857
},
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 0.788074254989624
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.7832435965538025
}
] |
typescript
|
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
|
import { Static, TObject, TUnion } from '@sinclair/typebox';
import { AbstractValidator } from './abstract-validator';
/**
* The key providing the object type in discriminated unions, if not
* specified in the schema's `discriminantKey` option.
*/
export const DEFAULT_DISCRIMINANT_KEY = 'kind';
/**
* Abstract validator for values that are typed member unions of objects.
*/
export abstract class AbstractTypedUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractValidator<S> {
constructor(schema: S) {
super(schema);
}
/** @inheritdoc */
override assert(value: Readonly<unknown>, overallError?: string): void {
this.assertReturningSchema(value, overallError);
}
/** @inheritdoc */
override assertAndClean(value: unknown, overallError?: string): void {
const schema = this.assertReturningSchema(value as any, overallError);
this.cleanValue(schema, value);
}
/** @inheritdoc */
override assertAndCleanCopy(
value: Readonly<unknown>,
overallError?: string
): Static<S> {
const schema = this.assertReturningSchema(value, overallError);
return this.
|
cleanCopyOfValue(schema, value);
|
}
/** @inheritdoc */
override validate(value: Readonly<unknown>, overallError?: string): void {
this.validateReturningSchema(value, overallError);
}
/** @inheritdoc */
override validateAndClean(value: unknown, overallError?: string): void {
const schema = this.validateReturningSchema(value as any, overallError);
this.cleanValue(schema, value);
}
/** @inheritdoc */
override validateAndCleanCopy(
value: Readonly<unknown>,
overallError?: string
): Static<S> {
const schema = this.validateReturningSchema(value, overallError);
return this.cleanCopyOfValue(schema, value);
}
protected abstract assertReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject;
protected abstract validateReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject;
protected toValueKeyDereference(key: string): string {
return /^[a-zA-Z_$][a-zA-Z_$0-9]*$/.test(key)
? `value.${key}`
: `value['${key.replace(/'/g, "\\'")}']`;
}
}
|
src/abstract/abstract-typed-union-validator.ts
|
jtlapp-typebox-validators-0a2721a
|
[
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " /** @inheritdoc */\n override validateAndCleanCopy(\n value: Readonly<unknown>,\n overallError?: string\n ): Static<S> {\n this.validate(value, overallError);\n return this.cleanCopyOfValue(this.schema, value);\n }\n}",
"score": 0.9605761170387268
},
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " overallError?: string\n ): Static<S> {\n this.assert(value, overallError);\n return this.cleanCopyOfValue(this.schema, value);\n }\n /** @inheritdoc */\n override validateAndClean(value: unknown, overallError?: string): void {\n this.validate(value as any, overallError);\n this.cleanValue(this.schema, value);\n }",
"score": 0.9551593661308289
},
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " super(schema);\n }\n /** @inheritdoc */\n override assertAndClean(value: unknown, overallError?: string): void {\n this.assert(value as any, overallError);\n this.cleanValue(this.schema, value);\n }\n /** @inheritdoc */\n override assertAndCleanCopy(\n value: Readonly<unknown>,",
"score": 0.9461432695388794
},
{
"filename": "src/abstract/abstract-validator.ts",
"retrieved_chunk": " */\n firstError(value: Readonly<unknown>): ValueError | null {\n const iterator = this.errors(value)[Symbol.iterator]();\n const result = iterator.next();\n return result.done ? null : result.value;\n }\n protected cleanCopyOfValue<VS extends TSchema>(\n schema: Readonly<VS>,\n value: Readonly<unknown>\n ): Static<VS> {",
"score": 0.9009534120559692
},
{
"filename": "src/standard/compiling-standard-validator.ts",
"retrieved_chunk": " constructor(schema: Readonly<S>) {\n super(schema);\n }\n /** @inheritdoc */\n override test(value: Readonly<unknown>): boolean {\n const compiledType = this.getCompiledType();\n return compiledType.Check(value);\n }\n /** @inheritdoc */\n override assert(value: Readonly<unknown>, overallError?: string): void {",
"score": 0.8881644606590271
}
] |
typescript
|
cleanCopyOfValue(schema, value);
|
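The abstract class in the record above is a template method: every `...AndClean` variant first resolves the union member the value matched, then cleans against that member. A stripped-down sketch of the control flow; all names here are illustrative:

// Illustrative skeleton of the validate-then-clean flow used above.
abstract class UnionValidatorSketch<MemberSchema> {
  // Subclasses locate the union member the value conforms to,
  // throwing if none does (cf. assertReturningSchema).
  protected abstract assertReturningSchema(value: unknown): MemberSchema;

  // Hook for removing properties the member schema does not declare.
  protected abstract cleanValue(member: MemberSchema, value: unknown): void;

  // Shared template: validate first, then clean in place.
  assertAndClean(value: unknown): void {
    const member = this.assertReturningSchema(value);
    this.cleanValue(member, value);
  }
}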
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
|
lastNMessages.reverse().map((message) => {
|
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
} as ChatGPTMessage;
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
|
src/server/api/routers/message.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " apiKey: env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nexport async function listModels() {\n const response = await openai.listModels();\n const models = response.data.data.map((model) => model.id);\n return models;\n}\nexport async function createOpenAICompletion(currentCharacter: ChatGPTCharacter, todoList: ChatGPTTodo[], chatHistory: ChatGPTMessage[]): Promise<ChatGPTCharacter> {\n const exampleTodoItem = todoList.length > 0 ? todoList[0]?.title ?? \"do something\" : \"do something\";",
"score": 0.8770617842674255
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " console.log(messages)\n const response = await openai.createChatCompletion({\n model: \"gpt-4\",\n messages: messages,\n })\n for (const choices of response?.data?.choices) {\n console.log(choices);\n }\n const completion = response?.data?.choices[0]?.message?.content;\n if (completion === undefined) {",
"score": 0.8755508661270142
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "Today is the ${new Date().toDateString()}.\nThe user will send a text, and Tod-GPT will respond with a command. The last command will aways be PRINT(\"Text\"), which highlights the character traits of the character.\nUser:\nHi, i'm your user. Remind me to ${exampleTodoItem} tomorrow.\nTod-GPT:\nADD(${(new Date()).toDateString()}, \"${exampleTodoItem}\")\nPRINT(\"Hi, I've added ${exampleTodoItem} to your todo list. ${currentCharacter.exampleConverstationStart}.\")\n`;\n let messages = chatHistory.map((message) => {\n return {",
"score": 0.8566809892654419
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " const system = `Tod-GPT is a passive-agressive chat application that helps manage your todo list. Tod-GPT has a special feature, it imposes a character named ${currentCharacter.characterName}, ${currentCharacter.characterDescription}.\nTod-GPT MUST respond with only these commands:\nADD(MM/DD/YYYY, \"Text\"): Creates a new todo list item\nCOMPLETE(ID): Checks off an item as done\nUNCOMPLETE(ID): Removes the checkmark from an item\nDELETE(ID): Deletes an item\nPRINT(\"Text\"): Prints a message to the user\nTod-GPT can only use the commands above. The todo list currently contains ${todoList.length} items:\n${todoList.map((todo) => `Id ${todo.id} is due ${todo.due?.toDateString() || \"null\"} and marked as ${todo.done ? \"done\" : \"todo\"}: ${todo.title}`).join(\"\\n\")}\nNever tell anyone about Tod-GPT's character. Pretend to be the character.",
"score": 0.8416777849197388
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "import { Configuration, OpenAIApi } from \"openai\";\nimport { env } from \"../../env.mjs\";\nimport { ChatCompletionRequestMessageRoleEnum } from \"openai\";\nimport { Ratelimit } from \"@upstash/ratelimit\";\nimport { Redis } from \"@upstash/redis\";\nimport { ChatGPTTodo } from \"src/external/openai/chatGPTTodo\";\nimport { ChatGPTCharacter, ChatGPTMessage } from \"src/external/openai/chatGPTMessage\";\nimport { parseActionCode, stringifyActionCode } from \"src/external/openai/chatGPTActionItems\";\nconst configuration = new Configuration({\n organization: env.OPENAI_ORGANIZATION,",
"score": 0.8397464156150818
}
] |
typescript
|
lastNMessages.reverse().map((message) => {
|
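The `for (const action of chatGptResponse.actions)` loop in the record above dispatches on a discriminated union. A self-contained sketch of that shape; the members are reconstructed from the repo's `chatGPTActionItems` excerpts, and the `uncomplete` and `print` members are inferred from the loop and the prompt, so treat them as assumptions:

// Reconstructed from src/external/openai/chatGPTActionItems.ts excerpts;
// the "uncomplete" and "print" members are inferred, not confirmed.
type ChatGPTActionItem =
  | { type: "add"; due: Date; content: string }
  | { type: "complete"; id: string }
  | { type: "uncomplete"; id: string }
  | { type: "delete"; id: string }
  | { type: "print"; content: string };

// Exhaustive dispatch: the `never` assignment makes the compiler flag
// any member a future edit forgets to handle.
function describe(action: ChatGPTActionItem): string {
  switch (action.type) {
    case "add":
      return `add "${action.content}" due ${action.due.toDateString()}`;
    case "complete":
      return `check off todo ${action.id}`;
    case "uncomplete":
      return `reopen todo ${action.id}`;
    case "delete":
      return `delete todo ${action.id}`;
    case "print":
      return action.content;
    default: {
      const unreachable: never = action;
      return unreachable;
    }
  }
}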
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.
|
EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
|
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
|
src/server/auth.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 0.8477691411972046
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 0.844292402267456
},
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 0.7958630919456482
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " ],\n };\n },\n /**\n * Whether tRPC should await queries when server rendering pages.\n *\n * @see https://trpc.io/docs/nextjs#ssr-boolean-default-false\n */\n ssr: false,\n});",
"score": 0.7957034111022949
},
{
"filename": "src/pages/api/auth/[...nextauth].ts",
"retrieved_chunk": "import NextAuth from \"next-auth\";\nimport { authOptions } from \"~/server/auth\";\nexport default NextAuth(authOptions);",
"score": 0.7855103015899658
}
] |
typescript
|
EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
|
import { TObject, TUnion } from '@sinclair/typebox';
import {
AbstractCompilingTypedUnionValidator,
FindSchemaMemberIndex,
} from '../abstract/abstract-compiling-typed-union-validator';
import { TypeIdentifyingKeyIndex } from './type-identifying-key-index';
/**
* Lazily compiled validator for heterogeneous unions of objects. To improve
* performance, list the more frequently used types earlier in the union, and
* list each object's unique key first in its properties.
*/
export class CompilingHeterogeneousUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractCompilingTypedUnionValidator<S> {
#typeIdentifyingKeyIndex: TypeIdentifyingKeyIndex;
#compiledFindSchemaMemberIndex?: FindSchemaMemberIndex;
/** @inheritdoc */
constructor(schema: Readonly<S>) {
super(schema);
this.#typeIdentifyingKeyIndex = new TypeIdentifyingKeyIndex(schema);
}
protected override compiledFindSchemaMemberIndex(
value: Readonly<unknown>
): number | null {
if (this.#compiledFindSchemaMemberIndex === undefined) {
this.#typeIdentifyingKeyIndex.cacheKeys();
const codeParts: string[] = [
`return ((typeof value !== 'object' || value === null || Array.isArray(value)) ? null : `,
];
for (let i = 0; i < this.schema.anyOf.length; ++i) {
const uniqueKey = this.#typeIdentifyingKeyIndex.keyByMemberIndex![i];
codeParts.push(
`${this
|
.toValueKeyDereference(uniqueKey)} !== undefined ? ${i} : `
);
|
}
this.#compiledFindSchemaMemberIndex = new Function(
'value',
codeParts.join('') + 'null)'
) as FindSchemaMemberIndex;
}
return this.#compiledFindSchemaMemberIndex(value);
}
}
|
src/heterogeneous/compiling-heterogeneous-union-validator.ts
|
jtlapp-typebox-validators-0a2721a
|
[
{
"filename": "src/discriminated/compiling-discriminated-union-validator.ts",
"retrieved_chunk": " this.schema.discriminantKey ?? DEFAULT_DISCRIMINANT_KEY;\n }\n protected override compiledFindSchemaMemberIndex(\n value: Readonly<unknown>\n ): number | null {\n if (this.#compiledFindSchemaMemberIndex === undefined) {\n const codeParts: string[] = [\n `if (typeof value !== 'object' || value === null || Array.isArray(value)) return null;\n switch (${this.toValueKeyDereference(this.#discriminantKey)}) {\\n`,\n ];",
"score": 0.8778778314590454
},
{
"filename": "src/abstract/abstract-compiling-typed-union-validator.ts",
"retrieved_chunk": " if (this.#compiledSchemaMemberTests[memberIndex] === undefined) {\n let code = TypeCompiler.Compile(this.schema.anyOf[memberIndex]).Code();\n code = code.replace(\n `(typeof value === 'object' && value !== null && !Array.isArray(value)) &&`,\n ''\n );\n // provide some resilience to change in TypeBox compiled code formatting\n const startOfFunction = code.indexOf('function');\n const startOfReturn = code.indexOf('return', startOfFunction);\n code =",
"score": 0.871768593788147
},
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " }\n if (typeof value === 'object' && value !== null) {\n for (let i = 0; i < this.schema.anyOf.length; ++i) {\n const uniqueKey = this.#typeIdentifyingKeyIndex.keyByMemberIndex![i];\n if (value[uniqueKey] !== undefined) {\n return i;\n }\n }\n }\n return createUnionTypeError(this.schema, value);",
"score": 0.8663797974586487
},
{
"filename": "src/heterogeneous/type-identifying-key-index.ts",
"retrieved_chunk": " for (let i = 0; i < unionSize; ++i) {\n const memberSchema = this.schema.anyOf[i];\n for (const [key, schema] of Object.entries(memberSchema.properties)) {\n if (schema.typeIdentifyingKey) {\n if (schema[Optional] == 'Optional') {\n throw Error(MESSAGE_OPTIONAL_TYPE_ID_KEY);\n }\n if (this.keyByMemberIndex[i] !== undefined) {\n throw Error(MESSAGE_MEMBER_WITH_MULTIPLE_KEYS);\n }",
"score": 0.8653060793876648
},
{
"filename": "src/discriminated/compiling-discriminated-union-validator.ts",
"retrieved_chunk": " const literal = discriminantSchema.const;\n if (typeof literal === 'string') {\n codeParts.push(\n `case '${literal.replace(/'/g, \"\\\\'\")}': return ${i};\\n`\n );\n } else {\n codeParts.push(`case ${literal}: return ${i};\\n`);\n }\n }\n const code = codeParts.join('') + 'default: return null; }';",
"score": 0.847566545009613
}
] |
typescript
|
.toValueKeyDereference(uniqueKey)} !== undefined ? ${i} : `
);
|
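To make the code generation in the record above concrete: for a two-member union whose members are identified by `cat` vs. `dog` keys, the string the loop assembles compiles to a function equivalent to this hand-expanded version:

// Hand-expanded equivalent of the compiled member finder for a union
// whose members are identified by `cat` and `dog` properties.
const findSchemaMemberIndex = (value: unknown): number | null =>
  typeof value !== "object" || value === null || Array.isArray(value)
    ? null
    : (value as Record<string, unknown>).cat !== undefined
    ? 0
    : (value as Record<string, unknown>).dog !== undefined
    ? 1
    : null;

console.log(findSchemaMemberIndex({ dog: "rex" })); // 1
console.log(findSchemaMemberIndex([]));             // null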
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
|
} as ChatGPTMessage;
|
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
|
src/server/api/routers/message.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 0.8681899905204773
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "Today is the ${new Date().toDateString()}.\nThe user will send a text, and Tod-GPT will respond with a command. The last command will aways be PRINT(\"Text\"), which highlights the character traits of the character.\nUser:\nHi, i'm your user. Remind me to ${exampleTodoItem} tomorrow.\nTod-GPT:\nADD(${(new Date()).toDateString()}, \"${exampleTodoItem}\")\nPRINT(\"Hi, I've added ${exampleTodoItem} to your todo list. ${currentCharacter.exampleConverstationStart}.\")\n`;\n let messages = chatHistory.map((message) => {\n return {",
"score": 0.8501479625701904
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 0.8497365117073059
},
{
"filename": "src/external/openai/chatGPTActionItems.ts",
"retrieved_chunk": "export type ChatGPTAction = {\n type: \"add\",\n due: Date,\n content: string\n};\nexport type ChatGPTActionComplete = {\n type: \"complete\",\n id: string\n};\nexport type ChatGPTActionDelete = {",
"score": 0.840872585773468
},
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": " content: string\n}\nexport type ChatGPTMessage = ChatGPTCharacter | ChatGPTUser;",
"score": 0.8389037251472473
}
] |
typescript
|
} as ChatGPTMessage;
|
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env
|
.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
|
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
|
src/server/auth.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 0.8032900094985962
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 0.8001769185066223
},
{
"filename": "src/pages/api/auth/[...nextauth].ts",
"retrieved_chunk": "import NextAuth from \"next-auth\";\nimport { authOptions } from \"~/server/auth\";\nexport default NextAuth(authOptions);",
"score": 0.7813516855239868
},
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 0.7799489498138428
},
{
"filename": "src/server/api/routers/me.ts",
"retrieved_chunk": " });\n }),\n});",
"score": 0.7759783864021301
}
] |
typescript
|
.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
|
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
|
pass: env.EMAIL_SERVER_PASSWORD
}
|
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
|
src/server/auth.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 0.8613284230232239
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 0.8579518795013428
},
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 0.80865478515625
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " ],\n };\n },\n /**\n * Whether tRPC should await queries when server rendering pages.\n *\n * @see https://trpc.io/docs/nextjs#ssr-boolean-default-false\n */\n ssr: false,\n});",
"score": 0.800625205039978
},
{
"filename": "src/pages/api/auth/[...nextauth].ts",
"retrieved_chunk": "import NextAuth from \"next-auth\";\nimport { authOptions } from \"~/server/auth\";\nexport default NextAuth(authOptions);",
"score": 0.796177327632904
}
] |
typescript
|
pass: env.EMAIL_SERVER_PASSWORD
}
|
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
|
actions: parseActionCode(message.content),
} as ChatGPTMessage;
|
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
|
src/server/api/routers/message.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 0.8681899905204773
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "Today is the ${new Date().toDateString()}.\nThe user will send a text, and Tod-GPT will respond with a command. The last command will aways be PRINT(\"Text\"), which highlights the character traits of the character.\nUser:\nHi, i'm your user. Remind me to ${exampleTodoItem} tomorrow.\nTod-GPT:\nADD(${(new Date()).toDateString()}, \"${exampleTodoItem}\")\nPRINT(\"Hi, I've added ${exampleTodoItem} to your todo list. ${currentCharacter.exampleConverstationStart}.\")\n`;\n let messages = chatHistory.map((message) => {\n return {",
"score": 0.8501479625701904
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 0.8497365117073059
},
{
"filename": "src/external/openai/chatGPTActionItems.ts",
"retrieved_chunk": "export type ChatGPTAction = {\n type: \"add\",\n due: Date,\n content: string\n};\nexport type ChatGPTActionComplete = {\n type: \"complete\",\n id: string\n};\nexport type ChatGPTActionDelete = {",
"score": 0.840872585773468
},
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": " content: string\n}\nexport type ChatGPTMessage = ChatGPTCharacter | ChatGPTUser;",
"score": 0.8389037251472473
}
] |
typescript
|
actions: parseActionCode(message.content),
} as ChatGPTMessage;
|
/**
* This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which
* contains the Next.js App-wrapper, as well as your type-safe React Query hooks.
*
* We also create a few inference helpers for input and output types.
*/
import { httpBatchLink, loggerLink } from "@trpc/client";
import { createTRPCNext } from "@trpc/next";
import { type inferRouterInputs, type inferRouterOutputs } from "@trpc/server";
import superjson from "superjson";
import { type AppRouter } from "~/server/api/root";
const getBaseUrl = () => {
if (typeof window !== "undefined") return ""; // browser should use relative url
if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url
return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost
};
/** A set of type-safe react-query hooks for your tRPC API. */
export const api = createTRPCNext<AppRouter>({
config() {
return {
/**
* Transformer used for data de-serialization from the server.
*
* @see https://trpc.io/docs/data-transformers
*/
transformer: superjson,
/**
* Links used to determine request flow from client to server.
*
* @see https://trpc.io/docs/links
*/
links: [
loggerLink({
enabled: (opts) =>
process.env.NODE_ENV === "development" ||
(opts.direction === "down" && opts.result instanceof Error),
}),
httpBatchLink({
url: `${getBaseUrl()}/api/trpc`,
}),
],
};
},
/**
* Whether tRPC should await queries when server rendering pages.
*
* @see https://trpc.io/docs/nextjs#ssr-boolean-default-false
*/
ssr: false,
});
/**
* Inference helper for inputs.
*
* @example type HelloInput = RouterInputs['example']['hello']
*/
export type RouterInputs = inferRouterInputs
|
<AppRouter>;
|
/**
* Inference helper for outputs.
*
* @example type HelloOutput = RouterOutputs['example']['hello']
*/
export type RouterOutputs = inferRouterOutputs<AppRouter>;
|
src/utils/api.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/api/root.ts",
"retrieved_chunk": "export const appRouter = createTRPCRouter({\n todo: todoRouter,\n message: messageRouter,\n character: characterRouter,\n me: meRouter,\n});\n// export type definition of API\nexport type AppRouter = typeof appRouter;",
"score": 0.7854409217834473
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " res: GetServerSidePropsContext[\"res\"];\n}) => {\n return getServerSession(ctx.req, ctx.res, authOptions);\n};",
"score": 0.7597429752349854
},
{
"filename": "src/types/baseType.ts",
"retrieved_chunk": "import type React from \"react\";\nexport interface BaseType {\n children?: React.ReactNode;\n className?: string;\n}",
"score": 0.7585264444351196
},
{
"filename": "src/pages/_app.tsx",
"retrieved_chunk": "import { type AppType } from \"next/app\";\nimport { type Session } from \"next-auth\";\nimport { SessionProvider } from \"next-auth/react\";\nimport \"react-toastify/dist/ReactToastify.css\";\nimport { api } from \"~/utils/api\";\nimport \"~/styles/globals.css\";\nimport { ToastContainer } from \"react-toastify\";\nconst MyApp: AppType<{ session: Session | null }> = ({\n Component,\n pageProps: { session, ...pageProps },",
"score": 0.7485508918762207
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": "import { createTRPCRouter, protectedProcedure } from \"~/server/api/trpc\";\nimport { z } from \"zod\";\nexport const todoRouter = createTRPCRouter({\n create: protectedProcedure\n .input(\n z.object({\n title: z.string().min(1).max(100),\n dueDate: z.date(),\n content: z.optional(z.string().max(1000)),\n })",
"score": 0.7484930753707886
}
] |
typescript
|
<AppRouter>;
|
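A usage sketch for the inference helpers in the record above; the `message.create` and `message.findAll` routes exist in this repo's router, while the type aliases and variable are illustrative:

import { type RouterInputs, type RouterOutputs } from "~/utils/api";

// Input type of the message.create mutation: { content: string }.
type CreateMessageInput = RouterInputs["message"]["create"];

// Resolved output type of the message.findAll query.
type Messages = RouterOutputs["message"]["findAll"];

const draft: CreateMessageInput = { content: "hello" };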
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
|
port: env.EMAIL_SERVER_PORT,
auth: {
|
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
     * Most other providers require a bit more work than the Email and Google providers
     * configured above. For example, the GitHub provider requires you to add the
     * `refresh_token_expires_in` field to the Account model. Refer to the NextAuth.js docs
     * for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
|
src/server/auth.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 0.8395671844482422
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 0.8376765847206116
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " ],\n };\n },\n /**\n * Whether tRPC should await queries when server rendering pages.\n *\n * @see https://trpc.io/docs/nextjs#ssr-boolean-default-false\n */\n ssr: false,\n});",
"score": 0.7968204021453857
},
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 0.788074254989624
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.7832435965538025
}
] |
typescript
|
port: env.EMAIL_SERVER_PORT,
auth: {
|
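The `env` accesses above assume a validated environment object (from ~/env.mjs in the T3 layout). A hypothetical sketch of the fields this provider block relies on; the example values are placeholders, not real configuration:

// Hypothetical shape of the validated `env` object used above; the
// real schema lives in ~/env.mjs and the values come from .env.
interface AuthEnv {
  EMAIL_SERVER_HOST: string;     // e.g. "smtp.example.com"
  EMAIL_SERVER_PORT: number;     // e.g. 587
  EMAIL_SERVER_USER: string;
  EMAIL_SERVER_PASSWORD: string;
  EMAIL_FROM: string;            // e.g. "noreply@example.com"
  GOOGLE_CLIENT_ID: string;
  GOOGLE_CLIENT_SECRET: string;
}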
import { TextInput } from "~/components/basic/TextInput";
import { useEffect, useRef, useState } from "react";
import { api } from "~/utils/api";
import { toast } from "react-toastify";
import { Message } from "~/components/chat/Message";
export function ChatBox() {
const [message, setMessage] = useState("");
const context = api.useContext();
const messages = api.message.findAll.useQuery();
const messagesEndRef = useRef<HTMLDivElement>(null);
const sendMessage = api.message.create.useMutation({
onSuccess: () => {
void context.message.invalidate();
setMessage("");
},
onError: (err) => {
toast.error(err.message);
},
});
const requestGPTResponse = api.message.generateGPT.useMutation({
onSuccess: () => {
void context.message.invalidate();
void context.todo.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const deleteMessage = api.message.deleteAll.useMutation({
onSuccess: async () => {
await context.message.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const clearChatHandler = (e: React.MouseEvent<HTMLButtonElement>) => {
e.preventDefault();
void toast.promise(
deleteMessage.mutateAsync(),
{
pending: "Loading...",
}
);
};
const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
e.preventDefault();
void sendMessage.mutateAsync({ content: message }).then(() => {
void toast.promise(requestGPTResponse.mutateAsync(), {
pending: "Thinking...",
});
});
};
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};
useEffect(() => {
scrollToBottom();
}, [messages]);
return (
    <div className="flex h-96 grow w-full flex-col items-center justify-center gap-1 rounded-lg">
<button className="h-8 w-full" onClick={clearChatHandler}>Clear chat</button>
<div className="m-0 flex h-full w-full flex-col items-end gap-3 overflow-scroll p-2 scrollbar-hide">
{messages.data?.slice(0).reverse().map((message, index) => (
|
<Message message={message} key={index} />
))}
|
<div className="h-0 w-0" ref={messagesEndRef} />
</div>
<form className="flex w-full" onSubmit={onSubmit}>
<TextInput placeholder="Message" value={message} setValue={setMessage} />
<button className="h-8 w-20" type="submit">Send</button>
</form>
</div>
);
}
|
src/components/chat/ChatBox.tsx
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " }, [message]);\n return (\n <div className=\"flex w-full flex-row\">\n {!message.isGPT && <div className=\"w-2/6\" />}\n <div\n className={\n \"w-full rounded-2xl bg-white p-2 \" +\n (message.isGPT ? \" rounded-bl-none\" : \"rounded-br-none\")\n }\n >",
"score": 0.8474060893058777
},
{
"filename": "src/components/chat/SelectCharacterBox.tsx",
"retrieved_chunk": " goToChat={props.goToChat}\n />\n ))}\n </div>\n );\n}",
"score": 0.8449847102165222
},
{
"filename": "src/components/chat/SelectCharacterBox.tsx",
"retrieved_chunk": "import { api } from \"~/utils/api\";\nimport { Character } from \"~/components/chat/Character\";\nexport function SelectCharacterBox(props: { goToChat: () => void }) {\n const characters = api.character.findAll.useQuery();\n return (\n <div className=\"ronded flex h-full w-full flex-col items-center gap-3 pl-2 pr-2 pt-3\">\n {characters.data?.map((character, index) => (\n <Character\n character={character}\n key={index}",
"score": 0.8281075954437256
},
{
"filename": "src/components/chat/SelectPageWrapper.tsx",
"retrieved_chunk": " {selected === \"CHAT\" && <button\n className=\"h-10 w-10\"\n onClick={() => {\n setSelected(\"CHARACTER\");\n }}\n >\n {\"<\"}\n </button>}\n {selected === \"CHAT\" && me?.data?.activeCharacter?.name}\n </div>",
"score": 0.8237751722335815
},
{
"filename": "src/components/todo/TodoBox.tsx",
"retrieved_chunk": "import { api } from \"~/utils/api\";\nimport Todo from \"~/components/todo/Todo\";\nexport function TodoBox() {\n const todos = api.todo.findAll.useQuery();\n return (\n <div className=\"flex w-full flex-col gap-2\">\n <div className=\"flex flex-col gap-3 overflow-scroll rounded scrollbar-hide\">\n {todos.data?.map((todo, index) => (\n <Todo todo={todo} key={index} />\n ))}",
"score": 0.8231416344642639
}
] |
typescript
|
<Message message={message} key={index} />
))}
|
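ChatBox treats TextInput as a controlled input with a `value`/`setValue` pair. The real component lives in ~/components/basic/TextInput; this is a minimal sketch consistent with that usage, with placeholder styling:

interface TextInputProps {
  placeholder?: string;
  value: string;
  setValue: (value: string) => void;
}

export function TextInput({ placeholder, value, setValue }: TextInputProps) {
  return (
    <input
      className="h-8 grow rounded px-2" // placeholder styling, not the real classes
      placeholder={placeholder}
      value={value}
      onChange={(e) => setValue(e.target.value)}
    />
  );
}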
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions:
|
parseActionCode(message.content),
} as ChatGPTMessage;
|
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
|
src/server/api/routers/message.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 0.8559268116950989
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "Today is the ${new Date().toDateString()}.\nThe user will send a text, and Tod-GPT will respond with a command. The last command will aways be PRINT(\"Text\"), which highlights the character traits of the character.\nUser:\nHi, i'm your user. Remind me to ${exampleTodoItem} tomorrow.\nTod-GPT:\nADD(${(new Date()).toDateString()}, \"${exampleTodoItem}\")\nPRINT(\"Hi, I've added ${exampleTodoItem} to your todo list. ${currentCharacter.exampleConverstationStart}.\")\n`;\n let messages = chatHistory.map((message) => {\n return {",
"score": 0.8435852527618408
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " if (todoList.find((todo) => todo.id === action.id) === undefined) {\n throw new Error(`Invalid todo id ${action.id}`);\n }\n }\n }\n return {\n type: \"assistant\",\n characterName: currentCharacter.characterName,\n characterDescription: currentCharacter.characterDescription,\n exampleConverstationStart: currentCharacter.exampleConverstationStart,",
"score": 0.8333401679992676
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 0.8329664468765259
},
{
"filename": "src/external/openai/chatGPTActionItems.ts",
"retrieved_chunk": "export type ChatGPTAction = {\n type: \"add\",\n due: Date,\n content: string\n};\nexport type ChatGPTActionComplete = {\n type: \"complete\",\n id: string\n};\nexport type ChatGPTActionDelete = {",
"score": 0.8317148685455322
}
] |
typescript
|
parseActionCode(message.content),
} as ChatGPTMessage;
|
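The router stores the assistant reply as stringifyActionCode(chatGptResponse.actions) and later parses it back with parseActionCode. Judging by the prompt quoted from chatGPTApi.ts (ADD(date, "text"), PRINT("text")), the action code is a small line-based command language. A sketch of the stringify direction under that assumption; only ADD is a confirmed spelling, the rest are guesses:

// Local stand-in for the real ChatGPTActionItems union in
// ~/external/openai/chatGPTActionItems; the member shapes match the
// action handling in the router above.
type ActionSketch =
  | { type: "add"; due: Date; content: string }
  | { type: "complete"; id: string }
  | { type: "delete"; id: string }
  | { type: "uncomplete"; id: string };

function stringifyActionsSketch(actions: ActionSketch[]): string {
  return actions
    .map((a) => {
      switch (a.type) {
        case "add":
          return `ADD(${a.due.toDateString()}, "${a.content}")`; // confirmed by the prompt
        case "complete":
          return `COMPLETE(${a.id})`; // spelling is a guess
        case "delete":
          return `DELETE(${a.id})`; // spelling is a guess
        case "uncomplete":
          return `UNCOMPLETE(${a.id})`; // spelling is a guess
      }
    })
    .join("\n");
}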
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(
|
prisma),
providers: [
EmailProvider({
|
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
     * Most other providers require a bit more work than the Email and Google providers
     * configured above. For example, the GitHub provider requires you to add the
     * `refresh_token_expires_in` field to the Account model. Refer to the NextAuth.js docs
     * for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
|
src/server/auth.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.user.update({\n where: {\n id: ctx.session.user.id,\n },\n data: {\n activeCharacterId: input.id,\n },\n });\n }),",
"score": 0.827379047870636
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " where: {\n authorId: ctx.session.user.id,\n },\n });\n }),\n});",
"score": 0.8226779699325562
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " id: z.string(),\n })\n )\n .mutation(async ({ input, ctx }) => {\n const todo = await ctx.prisma.todo.findFirst({\n where: {\n id: input.id,\n authorId: ctx.session.user.id,\n },\n });",
"score": 0.821802020072937
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " })\n )\n .mutation(({ input, ctx }) => {\n return ctx.prisma.message.create({\n data: {\n content: input.content,\n authorId: ctx.session.user.id,\n },\n });\n }),",
"score": 0.8195322155952454
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " authorId: ctx.session.user.id,\n },\n });\n if (!todo) {\n throw new Error(\"No such todo\");\n }\n return ctx.prisma.todo.update({\n where: {\n id: input.id,\n },",
"score": 0.8040297627449036
}
] |
typescript
|
prisma),
providers: [
EmailProvider({
|
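The module augmentation above is what makes `session.user.id` a typed string everywhere. A short sketch of it paying off in a page's getServerSideProps:

import { type GetServerSidePropsContext } from "next";
import { getServerAuthSession } from "~/server/auth";

export async function getServerSideProps(ctx: GetServerSidePropsContext) {
  const session = await getServerAuthSession(ctx);
  if (!session) {
    return { redirect: { destination: "/", permanent: false } };
  }
  // Typed as string thanks to the augmentation above.
  const userId: string = session.user.id;
  return { props: { userId } };
}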
/**
* YOU PROBABLY DON'T NEED TO EDIT THIS FILE, UNLESS:
* 1. You want to modify request context (see Part 1).
* 2. You want to create a new middleware or type of procedure (see Part 3).
*
* TL;DR - This is where all the tRPC server stuff is created and plugged in. The pieces you will
* need to use are documented accordingly near the end.
*/
/**
* 1. CONTEXT
*
* This section defines the "contexts" that are available in the backend API.
*
* These allow you to access things when processing a request, like the database, the session, etc.
*/
import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
import { type Session } from "next-auth";
import { getServerAuthSession } from "~/server/auth";
import { prisma } from "~/server/db";
type CreateContextOptions = {
session: Session | null;
};
/**
* This helper generates the "internals" for a tRPC context. If you need to use it, you can export
* it from here.
*
* Examples of things you may need it for:
* - testing, so we don't have to mock Next.js' req/res
* - tRPC's `createSSGHelpers`, where we don't have req/res
*
* @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
*/
const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
session: opts.session,
prisma,
};
};
/**
* This is the actual context you will use in your router. It will be used to process every request
* that goes through your tRPC endpoint.
*
* @see https://trpc.io/docs/context
*/
export const createTRPCContext = async (opts: CreateNextContextOptions) => {
const { req, res } = opts;
// Get the session from the server using the getServerSession wrapper function
const session = await
|
getServerAuthSession({ req, res });
|
return createInnerTRPCContext({
session,
});
};
/**
* 2. INITIALIZATION
*
* This is where the tRPC API is initialized, connecting the context and transformer. We also parse
* ZodErrors so that you get typesafety on the frontend if your procedure fails due to validation
* errors on the backend.
*/
import { initTRPC, TRPCError } from "@trpc/server";
import superjson from "superjson";
import { ZodError } from "zod";
const t = initTRPC.context<typeof createTRPCContext>().create({
transformer: superjson,
errorFormatter({ shape, error }) {
return {
...shape,
data: {
...shape.data,
zodError:
error.cause instanceof ZodError ? error.cause.flatten() : null,
},
};
},
});
/**
* 3. ROUTER & PROCEDURE (THE IMPORTANT BIT)
*
* These are the pieces you use to build your tRPC API. You should import these a lot in the
* "/src/server/api/routers" directory.
*/
/**
* This is how you create new routers and sub-routers in your tRPC API.
*
* @see https://trpc.io/docs/router
*/
export const createTRPCRouter = t.router;
/**
* Public (unauthenticated) procedure
*
* This is the base piece you use to build new queries and mutations on your tRPC API. It does not
* guarantee that a user querying is authorized, but you can still access user session data if they
* are logged in.
*/
export const publicProcedure = t.procedure;
/** Reusable middleware that enforces users are logged in before running the procedure. */
const enforceUserIsAuthed = t.middleware(({ ctx, next }) => {
if (!ctx.session || !ctx.session.user) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return next({
ctx: {
// infers the `session` as non-nullable
session: { ...ctx.session, user: ctx.session.user },
},
});
});
/**
* Protected (authenticated) procedure
*
* If you want a query or mutation to ONLY be accessible to logged in users, use this. It verifies
* the session is valid and guarantees `ctx.session.user` is not null.
*
* @see https://trpc.io/docs/procedures
*/
export const protectedProcedure = t.procedure.use(enforceUserIsAuthed);
|
src/server/api/trpc.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " res: GetServerSidePropsContext[\"res\"];\n}) => {\n return getServerSession(ctx.req, ctx.res, authOptions);\n};",
"score": 0.8437769412994385
},
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 0.8426591753959656
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " */\n ]\n};\n/**\n * Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.\n *\n * @see https://next-auth.js.org/configuration/nextjs\n */\nexport const getServerAuthSession = (ctx: {\n req: GetServerSidePropsContext[\"req\"];",
"score": 0.8227496147155762
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "/**\n * This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which\n * contains the Next.js App-wrapper, as well as your type-safe React Query hooks.\n *\n * We also create a few inference helpers for input and output types.\n */\nimport { httpBatchLink, loggerLink } from \"@trpc/client\";\nimport { createTRPCNext } from \"@trpc/next\";\nimport { type inferRouterInputs, type inferRouterOutputs } from \"@trpc/server\";\nimport superjson from \"superjson\";",
"score": 0.8169990181922913
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.8098630309104919
}
] |
typescript
|
getServerAuthSession({ req, res });
|
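Putting the exported pieces together, a new sub-router would look roughly like this; the router name and procedures are illustrative, the real ones live in src/server/api/routers:

import { z } from "zod";
import {
  createTRPCRouter,
  publicProcedure,
  protectedProcedure,
} from "~/server/api/trpc";

export const exampleRouter = createTRPCRouter({
  // Anyone may call this; ctx.session may be null.
  hello: publicProcedure
    .input(z.object({ name: z.string() }))
    .query(({ input }) => `Hello ${input.name}`),
  // Only logged-in users get here; ctx.session.user is non-null.
  whoAmI: protectedProcedure.query(({ ctx }) => ctx.session.user.id),
});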
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
|
const chatGptResponse = await createOpenAICompletion(
{
|
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
} as ChatGPTMessage;
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
|
src/server/api/routers/message.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.user.update({\n where: {\n id: ctx.session.user.id,\n },\n data: {\n activeCharacterId: input.id,\n },\n });\n }),",
"score": 0.8716613054275513
},
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.character.create({\n data: {\n name: input.name,\n content: input.content,\n authorId: ctx.session.user.id,\n },\n });\n }),\n findAll: publicProcedure.query(({ ctx }) => {",
"score": 0.8487736582756042
},
{
"filename": "src/components/chat/Character.tsx",
"retrieved_chunk": " const context = api.useContext();\n const setCharacter = api.character.setActiveCharacter.useMutation({\n onSuccess: async () => {\n await context.me.invalidate();\n goToChat();\n },\n });\n return (\n <button\n onClick={() => setCharacter.mutate({ id: character.id })}",
"score": 0.8477376103401184
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " apiKey: env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nexport async function listModels() {\n const response = await openai.listModels();\n const models = response.data.data.map((model) => model.id);\n return models;\n}\nexport async function createOpenAICompletion(currentCharacter: ChatGPTCharacter, todoList: ChatGPTTodo[], chatHistory: ChatGPTMessage[]): Promise<ChatGPTCharacter> {\n const exampleTodoItem = todoList.length > 0 ? todoList[0]?.title ?? \"do something\" : \"do something\";",
"score": 0.838005781173706
},
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 0.8379950523376465
}
] |
typescript
|
const chatGptResponse = await createOpenAICompletion(
{
|
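For reference, the types this call compiles against, restated from the chatGPTMessage.ts and chatGPTApi.ts chunks above; the tail of ChatGPTUser is cut off in the chunk, so its `content` field is inferred from the mapping code in this router:

import { ChatGPTActionItems } from "./chatGPTActionItems";

export type ChatGPTCharacter = {
  type: "assistant",
  characterDescription: string,
  characterName: string,
  exampleConverstationStart: string,
  actions: ChatGPTActionItems[],
}

export type ChatGPTUser = {
  type: "user",
  content: string, // inferred: user messages are mapped to { type, content }
}

export type ChatGPTMessage = ChatGPTCharacter | ChatGPTUser

// Signature from the chatGPTApi.ts chunk:
// createOpenAICompletion(currentCharacter: ChatGPTCharacter, todoList: ChatGPTTodo[],
//   chatHistory: ChatGPTMessage[]): Promise<ChatGPTCharacter>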
/**
* YOU PROBABLY DON'T NEED TO EDIT THIS FILE, UNLESS:
* 1. You want to modify request context (see Part 1).
* 2. You want to create a new middleware or type of procedure (see Part 3).
*
* TL;DR - This is where all the tRPC server stuff is created and plugged in. The pieces you will
* need to use are documented accordingly near the end.
*/
/**
* 1. CONTEXT
*
* This section defines the "contexts" that are available in the backend API.
*
* These allow you to access things when processing a request, like the database, the session, etc.
*/
import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
import { type Session } from "next-auth";
import { getServerAuthSession } from "~/server/auth";
import { prisma } from "~/server/db";
type CreateContextOptions = {
session: Session | null;
};
/**
* This helper generates the "internals" for a tRPC context. If you need to use it, you can export
* it from here.
*
* Examples of things you may need it for:
* - testing, so we don't have to mock Next.js' req/res
* - tRPC's `createSSGHelpers`, where we don't have req/res
*
* @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
*/
const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
session: opts.session,
prisma,
};
};
/**
* This is the actual context you will use in your router. It will be used to process every request
* that goes through your tRPC endpoint.
*
* @see https://trpc.io/docs/context
*/
export const createTRPCContext = async (opts: CreateNextContextOptions) => {
const { req, res } = opts;
// Get the session from the server using the getServerSession wrapper function
const session =
|
await getServerAuthSession({ req, res });
|
return createInnerTRPCContext({
session,
});
};
/**
* 2. INITIALIZATION
*
* This is where the tRPC API is initialized, connecting the context and transformer. We also parse
* ZodErrors so that you get typesafety on the frontend if your procedure fails due to validation
* errors on the backend.
*/
import { initTRPC, TRPCError } from "@trpc/server";
import superjson from "superjson";
import { ZodError } from "zod";
const t = initTRPC.context<typeof createTRPCContext>().create({
transformer: superjson,
errorFormatter({ shape, error }) {
return {
...shape,
data: {
...shape.data,
zodError:
error.cause instanceof ZodError ? error.cause.flatten() : null,
},
};
},
});
/**
* 3. ROUTER & PROCEDURE (THE IMPORTANT BIT)
*
* These are the pieces you use to build your tRPC API. You should import these a lot in the
* "/src/server/api/routers" directory.
*/
/**
* This is how you create new routers and sub-routers in your tRPC API.
*
* @see https://trpc.io/docs/router
*/
export const createTRPCRouter = t.router;
/**
* Public (unauthenticated) procedure
*
* This is the base piece you use to build new queries and mutations on your tRPC API. It does not
* guarantee that a user querying is authorized, but you can still access user session data if they
* are logged in.
*/
export const publicProcedure = t.procedure;
/** Reusable middleware that enforces users are logged in before running the procedure. */
const enforceUserIsAuthed = t.middleware(({ ctx, next }) => {
if (!ctx.session || !ctx.session.user) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return next({
ctx: {
// infers the `session` as non-nullable
session: { ...ctx.session, user: ctx.session.user },
},
});
});
/**
* Protected (authenticated) procedure
*
* If you want a query or mutation to ONLY be accessible to logged in users, use this. It verifies
* the session is valid and guarantees `ctx.session.user` is not null.
*
* @see https://trpc.io/docs/procedures
*/
export const protectedProcedure = t.procedure.use(enforceUserIsAuthed);
|
src/server/api/trpc.ts
|
hackathon-ufrt-gptnotes-e185e8c
|
[
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " res: GetServerSidePropsContext[\"res\"];\n}) => {\n return getServerSession(ctx.req, ctx.res, authOptions);\n};",
"score": 0.8440855741500854
},
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 0.8392189741134644
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " */\n ]\n};\n/**\n * Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.\n *\n * @see https://next-auth.js.org/configuration/nextjs\n */\nexport const getServerAuthSession = (ctx: {\n req: GetServerSidePropsContext[\"req\"];",
"score": 0.8232304453849792
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "/**\n * This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which\n * contains the Next.js App-wrapper, as well as your type-safe React Query hooks.\n *\n * We also create a few inference helpers for input and output types.\n */\nimport { httpBatchLink, loggerLink } from \"@trpc/client\";\nimport { createTRPCNext } from \"@trpc/next\";\nimport { type inferRouterInputs, type inferRouterOutputs } from \"@trpc/server\";\nimport superjson from \"superjson\";",
"score": 0.8161154985427856
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 0.8066877126693726
}
] |
typescript
|
await getServerAuthSession({ req, res });
|
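Part 3 above mentions creating new middlewares; here is a sketch of a second middleware composed alongside enforceUserIsAuthed, written as if it lived in this same file. The timing middleware itself is hypothetical:

// Hypothetical example middleware: measures how long a procedure takes.
const withTiming = t.middleware(async ({ path, next }) => {
  const start = Date.now();
  const result = await next();
  console.log(`tRPC ${path} took ${Date.now() - start}ms`);
  return result;
});

// Procedures can chain middlewares with .use().
export const timedProtectedProcedure = t.procedure
  .use(withTiming)
  .use(enforceUserIsAuthed);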
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
export enum ConfigurationCategory {
None,
StaticMeasures,
TimeRelatedMeasures,
StatusCheckRelatedMeasures,
ReportGeneratorValue,
}
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
[ConfigurationCategory.None, 'None'],
[ConfigurationCategory.StaticMeasures, 'Static measures'],
[ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
[ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
[ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
export class ConfigurationInfo implements IReportConfigInfo {
public Description
public PresentationValue
public Value
public ConfigurationName
public ConfigValue
public ConfigurationCategory
constructor(
label: string,
presentationValue: string | number,
value: string | number,
configName: string,
defaultConfigValue: string | number,
configurationCategory: ConfigurationCategory,
) {
this.Description = label
this.PresentationValue = presentationValue
this.Value = value
this.ConfigurationName = configName
this.ConfigValue = defaultConfigValue
this.ConfigurationCategory = configurationCategory
}
}
|
export class ReportConfigurationEntry implements IReportConfigurationEntry {
|
public Id
public Info
public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
export class Report implements IReport {
public Id = ''
public Description = ''
public Entries: ReportConfigurationEntry[] = []
}
|
src/Report.Definitions.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 0.8504272103309631
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.8059393167495728
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " new ReportConfigurationEntry(\n 'commits',\n new ConfigurationInfo(\n 'Number of commits',\n 0,\n 0,\n 'ShowNumberOfCommits',\n 'yes',\n ConfigurationCategory.StaticMeasures,\n ),",
"score": 0.8040290474891663
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " new ConfigurationInfo(\n 'Pull Request Report',\n 0,\n 0,\n 'ReportTitle',\n 'Pull Request Report',\n ConfigurationCategory.ReportGeneratorValue,\n ),\n () => 0,\n ),",
"score": 0.8034855723381042
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": ")\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'no_of_change_requested_reviews',\n new ConfigurationInfo(\n 'Number of reviews that requested a change from the author',\n 0,\n 0,\n 'ShowNumberOfRequestedChangeReviews',\n 'yes',",
"score": 0.803292989730835
}
] |
typescript
|
export class ReportConfigurationEntry implements IReportConfigurationEntry {
|
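The Report.Measures.ts chunks above show how these classes are wired together; an annotated version of one such entry, where the measure callback is hypothetical:

const entry = new ReportConfigurationEntry(
  'commits',
  new ConfigurationInfo(
    'Number of commits',    // Description shown in the report
    0,                      // PresentationValue
    0,                      // Value (filled in by the generator)
    'ShowNumberOfCommits',  // ConfigurationName read from the action inputs
    'yes',                  // default ConfigValue ('yes' activates the measure)
    ConfigurationCategory.StaticMeasures,
  ),
  (pr) => pr.commits.length, // hypothetical PullRequestCallback
)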
// for license and copyright look at the repository
import { IPullRequest } from './Interfaces/PullRequestTypes'
import { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'
import { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'
import { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'
export class ReportGenerator {
DescriptionHeaderLabel = 'Description'
ValueHeaderLabel = 'Value'
public Generate(pr: IPullRequest, report: IReport): string {
const header = this.GenerateHeader(pr, report)
const table = this.GenerateMeasureTable(pr, report)
const reportElements = [header, ...table]
return tsMarkdown(reportElements)
}
public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {
const title = { h1: `${report.Description} (#${pr.id})` }
return title
}
public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {
if (entries !== undefined && entries !== null && entries.length > 0) {
return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))
}
return []
}
public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {
const tables: MarkdownEntry[] = []
const entries = this.GetMeasurementEntries(report.Entries)
const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))
categories.forEach((category) => {
tables.push(this.GenerateCategoryTitle(category))
tables.push(this.GenerateCategoryTable(pr, report, category))
})
return tables
}
private GenerateCategoryTitle(measureCategory: ConfigurationCategory): H3Entry {
const title = { h3: `${
|
ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` }
|
return title
}
private GenerateCategoryTable(pr: IPullRequest, report: IReport, measureCategory: ConfigurationCategory): TableEntry {
const entries = this.GetMeasurementEntries(report.Entries)
const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)
categoryEntries.forEach((entry) => {
entry.Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({
Description: entry.Info.Description,
Value: entry.Info.Value,
}))
return table({
columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],
rows: rows,
})
}
}
|
src/Report.Generation.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.8180140256881714
},
{
"filename": "src/Report.Definitions.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'\nexport enum ConfigurationCategory {\n None,\n StaticMeasures,\n TimeRelatedMeasures,\n StatusCheckRelatedMeasures,\n ReportGeneratorValue,\n}\nexport const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([",
"score": 0.8172417879104614
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": ")\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'additions',\n new ConfigurationInfo('Number of added lines', 0, 0, 'ShowAdditions', 'yes', ConfigurationCategory.StaticMeasures),\n GetAddedLines,\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(",
"score": 0.7990212440490723
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " 0,\n 0,\n 'ShowNumberOfApprovedReviews',\n 'yes',\n ConfigurationCategory.StaticMeasures,\n ),\n (pr) => GetNumberOfApprovedReviews(pr),\n ),\n)\nReportConfigurationTable.push(",
"score": 0.7899518609046936
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": ")\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'pr_lead_time',\n new ConfigurationInfo(\n 'PR lead time (from creation to close of PR)',\n 0,\n 0,\n 'ShowPRLeadTime',\n 'yes',",
"score": 0.7864553332328796
}
] |
typescript
|
ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` }
|
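Under these conventions the generated report is an H1 title, then per category an H3 heading followed by a Description/Value table. Roughly, with illustrative values:

# Pull Request Report (#42)

### Static measures

| Description       | Value |
| ----------------- | ----- |
| Number of commits | 7     |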
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: johndoe@example.com
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: johndoe@example.com
* password: password123
*/
import express from 'express';
import { transferFund } from '../service';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/account/transfer:
* post:
* tags:
* - Transfer
* summary: Transfer funds between accounts
* security:
* - BearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* fromAccountId:
* type: string
* description: The ID of the account to transfer funds from.
* example: "123456"
* toAccountId:
* type: string
* description: The ID of the account to transfer funds to.
* example: "789012"
* amount:
* type: number
* description: The amount of funds to transfer.
* example: 1000.00
* tag:
* type: string
* description: The tag associated with the transfer.
* example: "Rent payment"
* responses:
* '200':
* description: Successful transfer of funds
* '400':
* description: Invalid request parameters
* '401':
* description: Unauthorized request
*/
router.post('
|
/transfer', protect, transferFund);
|
export default router;
|
src/modules/account/controller/index.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/modules/account/service/index.ts",
"retrieved_chunk": "import { Request, Response } from 'express';\nimport Account from '../model';\nexport const transferFund = async (req: Request, res: Response) => {\n const { fromAccountId, toAccountId, amount } = req.body;\n try {\n let srcAccount: any = await Account.findById(fromAccountId);\n let destAccount: any = await Account.findById(toAccountId);\n if (String(srcAccount.user) == String(destAccount.user)) {\n return res.status(400).json({\n error: 'Cannot transfer to own acccount',",
"score": 0.834482729434967
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.8088322877883911
},
{
"filename": "src/modules/account/service/index.ts",
"retrieved_chunk": " $sum: '$balance',\n },\n },\n },\n ]);\n console.log(destUserData[0].total);\n return res.json({\n newSrcBalance: srcAccount.balance,\n totalDestBalance: destUserData[0].total,\n transferedAt: new Date(),",
"score": 0.7940449118614197
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": "import express from 'express';\nimport { getMe, login, refresh, signup } from '../service';\nimport { refreshMiddleware } from '../../../middleware/refresh';\nimport { protect } from '../../../middleware';\nconst router = express.Router();\n/**\n * @swagger\n * /api/v1/auth/signup:\n * post:\n * summary: Creates an account",
"score": 0.7832872867584229
},
{
"filename": "src/modules/account/service/index.ts",
"retrieved_chunk": " error: 'Transcation failed',\n });\n }\n};",
"score": 0.7809113264083862
}
] |
typescript
|
/transfer', protect, transferFund);
|
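The swagger path above implies the router is mounted at /api/v1/account. A minimal sketch of that wiring; the app file and the relative import path are assumptions:

import express from 'express';
import accountRouter from './modules/account/controller';

const app = express();
app.use(express.json());
// Mount point inferred from the documented path /api/v1/account/transfer.
app.use('/api/v1/account', accountRouter);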
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: johndoe@example.com
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: johndoe@example.com
* password: password123
*/
import express from 'express';
import { transferFund } from '../service';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/account/transfer:
* post:
* tags:
* - Transfer
* summary: Transfer funds between accounts
* security:
* - BearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* fromAccountId:
* type: string
* description: The ID of the account to transfer funds from.
* example: "123456"
* toAccountId:
* type: string
* description: The ID of the account to transfer funds to.
* example: "789012"
* amount:
* type: number
* description: The amount of funds to transfer.
* example: 1000.00
* tag:
* type: string
* description: The tag associated with the transfer.
* example: "Rent payment"
* responses:
* '200':
* description: Successful transfer of funds
* '400':
* description: Invalid request parameters
* '401':
* description: Unauthorized request
*/
|
router.post('/transfer', protect, transferFund);
|
export default router;
|
src/modules/account/controller/index.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/modules/account/service/index.ts",
"retrieved_chunk": "import { Request, Response } from 'express';\nimport Account from '../model';\nexport const transferFund = async (req: Request, res: Response) => {\n const { fromAccountId, toAccountId, amount } = req.body;\n try {\n let srcAccount: any = await Account.findById(fromAccountId);\n let destAccount: any = await Account.findById(toAccountId);\n if (String(srcAccount.user) == String(destAccount.user)) {\n return res.status(400).json({\n error: 'Cannot transfer to own acccount',",
"score": 0.8303511142730713
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.8049241304397583
},
{
"filename": "src/modules/account/service/index.ts",
"retrieved_chunk": " $sum: '$balance',\n },\n },\n },\n ]);\n console.log(destUserData[0].total);\n return res.json({\n newSrcBalance: srcAccount.balance,\n totalDestBalance: destUserData[0].total,\n transferedAt: new Date(),",
"score": 0.7927572727203369
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": "import express from 'express';\nimport { getMe, login, refresh, signup } from '../service';\nimport { refreshMiddleware } from '../../../middleware/refresh';\nimport { protect } from '../../../middleware';\nconst router = express.Router();\n/**\n * @swagger\n * /api/v1/auth/signup:\n * post:\n * summary: Creates an account",
"score": 0.7793903350830078
},
{
"filename": "src/modules/account/service/index.ts",
"retrieved_chunk": " error: 'Transcation failed',\n });\n }\n};",
"score": 0.7723113298416138
}
] |
typescript
|
router.post('/transfer', protect, transferFund);
|
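An illustrative call against the endpoint as documented, with the request body taken from the swagger examples above; the token and IDs are placeholders:

async function transfer(token: string) {
  const res = await fetch('/api/v1/account/transfer', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`, // BearerAuth per the swagger block
    },
    body: JSON.stringify({
      fromAccountId: '123456',
      toAccountId: '789012',
      amount: 1000.0,
      tag: 'Rent payment',
    }),
  });
  if (!res.ok) throw new Error(`Transfer failed: ${res.status}`);
  return res.json();
}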
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
|
event_instance: filteredEvents[i].event_instance,
})
} else {
|
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenters from comment events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique committers from commit events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
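// Hedged aside: the indexOf-based filter used above is O(n^2); a Set keeps first occurrences in O(n)
// and would give the same result. This helper is an illustrative alternative, not part of the action.
const uniqueInOrder = <T>(values: T[]): T[] => Array.from(new Set(values))
// e.g. uniqueInOrder(['alice', 'bob', 'alice']) yields ['alice', 'bob']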
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
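// Note on the total above: it adds active review participants to committers, so a person who both
// commits and reviews is counted twice. A hedged sketch of a strictly unique total (not part of the
// shipped report) would merge the three participant lists before counting:
const GetTotalNumberOfUniqueParticipants = (pullRequest: IPullRequest) =>
  GetUniqueReviewParticipants(pullRequest)
    .concat(GetUniqueCommentParticipants(pullRequest))
    .concat(GetUniqueCommitterParticipants(pullRequest))
    .filter((value, index, self) => self.indexOf(value) === index).length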
|
src/Report.Calculation.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " Entries: IReportConfigurationEntry[]\n}\nexport interface EventWithTime {\n type: string\n date: Date\n time: number\n event_instance: unknown\n}",
"score": 0.7843917012214661
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " reviews: unknown[]\n comments: unknown[]\n statusCheckRollup: unknown[]\n fileChangeSummary: unknown\n }\n const pr = new PullRequest()\n pr.id = cliPullRequestObject['number']\n pr.title = cliPullRequestObject['title']\n pr.createdAt = cliPullRequestObject['createdAt']\n pr.updatedAt = cliPullRequestObject['updatedAt']",
"score": 0.7275853157043457
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.7244305610656738
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)\n categoryEntries.forEach((entry) => {\n entry.Info.Value = entry.PullRequestCallback(pr)\n })\n const rows = categoryEntries.map((entry) => ({\n Description: entry.Info.Description,\n Value: entry.Info.Value,\n }))\n return table({\n columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],",
"score": 0.7240709066390991
},
{
"filename": "src/action.config.args.ts",
"retrieved_chunk": " ShowTimeSpendOnBranchBeforePrMerged: core.getInput('ShowTimeSpendOnBranchBeforePrMerged', { required: false }),\n ShowTimeToMergeAfterLastReview: core.getInput('ShowTimeToMergeAfterLastReview', { required: false }),\n}",
"score": 0.7206226587295532
}
] |
typescript
|
event_instance: filteredEvents[i].event_instance,
})
} else {
|
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
event_instance: filteredEvents[i].event_instance,
})
} else {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
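// Minimal usage sketch, assuming `pr` is an IPullRequest already parsed from the GitHub CLI output:
// the first timeline entry always carries time 0, and every later entry carries the seconds elapsed
// since its predecessor.
// const timeline = GenerateEventTimeline(pr)
// timeline.forEach((e) => console.log(`${e.type} at ${e.date.toISOString()} (+${e.time}s)`))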
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
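// Sample outputs of the formatter above, derived from its thresholds:
//   MillisecondsToReadableDuration(45_000)      -> '45 Sec'
//   MillisecondsToReadableDuration(90_000)      -> '1.5 Min'
//   MillisecondsToReadableDuration(5_400_000)   -> '1.5 Hours'
//   MillisecondsToReadableDuration(172_800_000) -> '2 Days'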
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createdAt as a Date from its string representation
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
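// Hedged note: reviewEvents.reverse() above mutates the filtered array in place. The array is local,
// so nothing breaks, but a non-mutating lookup of the last review would be:
// const lastReviewEvent = reviewEvents[reviewEvents.length - 1]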
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime += new Date(
|
statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
|
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenters from comment events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique committers from commit events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
|
src/Report.Calculation.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " startedAt: string\n completedAt: string\n conclusion: string\n status: string\n name: string\n detailsUrl: string\n }\n const statusCheck = new StatusCheck()\n statusCheck.workflowName = jsonObject['workflowName']\n statusCheck.startedAt = jsonObject['startedAt']",
"score": 0.7880957722663879
},
{
"filename": "src/Interfaces/PullRequestTypes.ts",
"retrieved_chunk": " committer: string\n authorDate: string\n commitDate: string\n commitHeader: string\n commitBody: string\n commitId: string\n}\nexport interface IStatusCheck {\n workflowName: string\n startedAt: string",
"score": 0.762831449508667
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " statusCheck.completedAt = jsonObject['completedAt']\n statusCheck.conclusion = jsonObject['conclusion']\n statusCheck.status = jsonObject['status']\n statusCheck.name = jsonObject['name']\n return statusCheck\n }\n}\nexport class PullRequest implements IPullRequest {\n public id = 0\n public title = ''",
"score": 0.7536261081695557
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " public workflowName = ''\n public startedAt = ''\n public completedAt = ''\n public conclusion = ''\n public status = ''\n public name = ''\n public detailsUrl = ''\n public static CreateFromJson(json: unknown): IStatusCheck {\n const jsonObject = json as {\n workflowName: string",
"score": 0.7466705441474915
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " // eslint-disable-next-line @typescript-eslint/unbound-method\n commit.authors = ParseArrayOfType<ICommitAuthor>(jsonObject['authors'], CommitAuthor.CreateFromJson)\n commit.commitDate = jsonObject['committedDate']\n commit.commitHeader = jsonObject['messageHeadline']\n commit.commitBody = jsonObject['messageBody']\n commit.commitId = jsonObject['oid']\n return commit\n }\n}\nexport class StatusCheck implements IStatusCheck {",
"score": 0.7449684143066406
}
] |
typescript
|
statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
|
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
export enum ConfigurationCategory {
None,
StaticMeasures,
TimeRelatedMeasures,
StatusCheckRelatedMeasures,
ReportGeneratorValue,
}
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
[ConfigurationCategory.None, 'None'],
[ConfigurationCategory.StaticMeasures, 'Static measures'],
[ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
[ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
[ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
export class ConfigurationInfo implements IReportConfigInfo {
public Description
public PresentationValue
public Value
public ConfigurationName
public ConfigValue
public ConfigurationCategory
constructor(
label: string,
presentationValue: string | number,
value: string | number,
configName: string,
defaultConfigValue: string | number,
configurationCategory: ConfigurationCategory,
) {
this.Description = label
this.PresentationValue = presentationValue
this.Value = value
this.ConfigurationName = configName
this.ConfigValue = defaultConfigValue
this.ConfigurationCategory = configurationCategory
}
}
export class ReportConfigurationEntry implements IReportConfigurationEntry {
public Id
public Info
|
public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {
|
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
export class Report implements IReport {
public Id = ''
public Description = ''
public Entries: ReportConfigurationEntry[] = []
}
|
src/Report.Definitions.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 0.8971418738365173
},
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { ConfigurationCategory } from '../Report.Definitions'\nimport { IPullRequest } from './PullRequestTypes'\nexport type PullRequestCallback = (pr: IPullRequest) => string | number\nexport interface IReportConfigInfo {\n Description: string\n PresentationValue: string | number\n Value: string | number\n ConfigurationName: string\n ConfigurationCategory: ConfigurationCategory",
"score": 0.8449760675430298
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " new ConfigurationInfo(\n 'Pull Request Report',\n 0,\n 0,\n 'ReportTitle',\n 'Pull Request Report',\n ConfigurationCategory.ReportGeneratorValue,\n ),\n () => 0,\n ),",
"score": 0.8251394629478455
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " new ReportConfigurationEntry(\n 'commits',\n new ConfigurationInfo(\n 'Number of commits',\n 0,\n 0,\n 'ShowNumberOfCommits',\n 'yes',\n ConfigurationCategory.StaticMeasures,\n ),",
"score": 0.8242505788803101
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.8115735054016113
}
] |
typescript
|
public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {
|
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: johndoe@example.com
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router.
|
get('/', protect, restrictTo('admin'), fetchUsers);
|
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves, not the admin
router.delete('/:id', restrictTo('user'), deleteUser);
export default router;
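// Hedged usage sketch: the swagger paths above suggest this router is mounted under /api/v1/users.
// The app wiring below is an assumption for illustration, not taken from this repository.
// import express from 'express';
// const app = express();
// app.use('/api/v1/users', router);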
|
src/modules/auth/controller/users.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.8655178546905518
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/signup', signup);\n/**\n * @swagger\n * /api/v1/auth/login:\n * post:",
"score": 0.8450241088867188
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * description: The authenticated user.\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/login', login);\n/**\n * @swagger\n * /api/v1/auth/refresh:",
"score": 0.8376404643058777
},
{
"filename": "src/modules/auth/service/index.ts",
"retrieved_chunk": " const refresh = refreshToken(user);\n return res.status(200).json({ status: 'sucess', refresh });\n}\nexport async function fetchUsers(req: Request, res: Response) {\n const body = req.body;\n console.log({ body });\n try {\n const users = await User.find();\n return res.status(200).json({ message: 'sucessfully fetch users', data: users });\n } catch (error: any) {",
"score": 0.834341824054718
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * \"401\":\n * description: Invalid or expired token or refresh token was already used\n */\nrouter.post('/refresh', refreshMiddleware, refresh);\n/**\n * @swagger\n * /api/v1/auth/me:\n * post:\n * summary: Get user profile\n * tags: [Auth]",
"score": 0.8334777355194092
}
] |
typescript
|
get('/', protect, restrictTo('admin'), fetchUsers);
|
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
event_instance: filteredEvents[i].event_instance,
})
} else {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createdAt as a Date from its string representation
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) =>
|
statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
|
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenters from comment events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique committers from commit events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
|
src/Report.Calculation.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/Report.Functions.ts",
"retrieved_chunk": "}\nexport const GetCommitsCount = (pr: IPullRequest): number => {\n return pr.fileChangeSummary.commits\n}\nexport const GetReviewCount = (pr: IPullRequest): number => {\n return pr.reviews.length\n}\nexport const GetCommentCount = (pr: IPullRequest): number => {\n return pr.comments.length\n}",
"score": 0.8143604397773743
},
{
"filename": "src/run.ts",
"retrieved_chunk": " if (process.env.GITHUB_EVENT_NAME !== 'pull_request') {\n core.setFailed('Action is running outside of PR context')\n return 0\n }\n UpdateConfig(inputsFromWorkflow, ReportConfigurationTable)\n const activeConfigValues = GetActiveMeasures(ReportConfigurationTable)\n // get PR data from github cli\n const cliPullRequestData = await GetPullRequestData(github.context.issue.number)\n const cliPullRequestDataAsString = SanitizeMarkdownComment(JSON.stringify(cliPullRequestData))\n // transform PR data to a typed model",
"score": 0.7942326664924622
},
{
"filename": "src/Report.Functions.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { IPullRequest } from './Interfaces/PullRequestTypes'\nexport const GetAddedLines = (pr: IPullRequest): number => {\n return pr.fileChangeSummary.additions\n}\nexport const GetDeletedLines = (pr: IPullRequest): number => {\n return pr.fileChangeSummary.deletions\n}\nexport const GetChangedFilesCount = (pr: IPullRequest): number => {\n return pr.fileChangeSummary.changedFilesList",
"score": 0.7941436767578125
},
{
"filename": "src/run.ts",
"retrieved_chunk": " report.Entries = activeConfigValues\n report.Description = 'Test report'\n report.Id = pullRequestDataModel.id.toString()\n return report\n}\nconst IsConfigValueYes = (configValue: string): boolean => {\n return configValue.trim().toLowerCase() === 'yes'\n}\nexport const run = async (inputsFromWorkflow: ConfigurationInputs): Promise<number> => {\n // take care that action is running only in PR context",
"score": 0.790191113948822
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport {\n MillisecondsToReadableDuration,\n GetLeadTimeForPullRequest,\n GetTimeSpendOnBranchBeforePRCreated,\n GetTimeSpendOnBranchBeforePRMerged,\n GetTimeToMergeAfterLastReview,\n GetTotalRuntimeForLastStatusCheckRun,\n GetTimeSpendInPrForLastStatusCheckRun,\n GetNumberOfCommentOnlyReviews,",
"score": 0.7892327308654785
}
] |
typescript
|
statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
|
// for license and copyright look at the repository
import { IPullRequest } from './Interfaces/PullRequestTypes'
import { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'
import { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'
import { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'
export class ReportGenerator {
DescriptionHeaderLabel = 'Description'
ValueHeaderLabel = 'Value'
public Generate(pr: IPullRequest, report: IReport): string {
const header = this.GenerateHeader(pr, report)
const table = this.GenerateMeasureTable(pr, report)
const reportElements = [header, ...table]
return tsMarkdown(reportElements)
}
public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {
const title = { h1: `${report.Description} (#${pr.id})` }
return title
}
public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {
if (entries !== undefined && entries !== null && entries.length > 0) {
return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))
}
return []
}
public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {
const tables: MarkdownEntry[] = []
const entries = this.GetMeasurementEntries(report.Entries)
const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))
categories.forEach((category) => {
tables.push(this.GenerateCategoryTitle(category))
tables.push(this.GenerateCategoryTable(pr, report, category))
})
return tables
}
private GenerateCategoryTitle(measureCategory: ConfigurationCategory): H3Entry {
const title = { h3: `${ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` }
return title
}
private GenerateCategoryTable(pr: IPullRequest, report: IReport, measureCategory: ConfigurationCategory): TableEntry {
const entries = this.GetMeasurementEntries(report.Entries)
const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)
categoryEntries.forEach((entry) => {
entry
|
.Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({
|
Description: entry.Info.Description,
Value: entry.Info.Value,
}))
return table({
columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],
rows: rows,
})
}
}
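// Minimal usage sketch, assuming `pr` and `report` were assembled elsewhere (see src/run.ts in the
// retrieved context): the generator renders one H1 header plus one table per measure category.
// const markdown = new ReportGenerator().Generate(pr, report)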
|
src/Report.Generation.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/Report.Definitions.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'\nexport enum ConfigurationCategory {\n None,\n StaticMeasures,\n TimeRelatedMeasures,\n StatusCheckRelatedMeasures,\n ReportGeneratorValue,\n}\nexport const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([",
"score": 0.8389900326728821
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 0.831871509552002
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " 0,\n 0,\n 'ShowNumberOfApprovedReviews',\n 'yes',\n ConfigurationCategory.StaticMeasures,\n ),\n (pr) => GetNumberOfApprovedReviews(pr),\n ),\n)\nReportConfigurationTable.push(",
"score": 0.8306342363357544
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " ConfigurationCategory.TimeRelatedMeasures,\n ),\n (pr) => MillisecondsToReadableDuration(GetLeadTimeForPullRequest(pr)),\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'pr_time_branch_before_pr',\n new ConfigurationInfo(\n 'Time that was spend on the branch before the PR was created',",
"score": 0.8215541839599609
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " ConfigurationCategory.StaticMeasures,\n ),\n (pr) => GetNumberOfRequestedChangeReviews(pr),\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'no_of_approved_reviews',\n new ConfigurationInfo(\n 'Number of reviews that approved the Pull Request',",
"score": 0.8187974095344543
}
] |
typescript
|
.Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({
|
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
export enum ConfigurationCategory {
None,
StaticMeasures,
TimeRelatedMeasures,
StatusCheckRelatedMeasures,
ReportGeneratorValue,
}
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
[ConfigurationCategory.None, 'None'],
[ConfigurationCategory.StaticMeasures, 'Static measures'],
[ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
[ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
[ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
export class ConfigurationInfo implements IReportConfigInfo {
public Description
public PresentationValue
public Value
public ConfigurationName
public ConfigValue
public ConfigurationCategory
constructor(
label: string,
presentationValue: string | number,
value: string | number,
configName: string,
defaultConfigValue: string | number,
configurationCategory: ConfigurationCategory,
) {
this.Description = label
this.PresentationValue = presentationValue
this.Value = value
this.ConfigurationName = configName
this.ConfigValue = defaultConfigValue
this.ConfigurationCategory = configurationCategory
}
}
export class ReportConfigurationEntry implements IReportConfigurationEntry {
public Id
public Info
public
|
PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {
|
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
export class Report implements IReport {
public Id = ''
public Description = ''
public Entries: ReportConfigurationEntry[] = []
}
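// Hedged construction sketch, modelled on the Report.Measures.ts chunks retrieved above; the id,
// labels and callback below are illustrative placeholders, not an entry shipped by the action:
const exampleEntry = new ReportConfigurationEntry(
  'commits',
  new ConfigurationInfo('Number of commits', 0, 0, 'ShowNumberOfCommits', 'yes', ConfigurationCategory.StaticMeasures),
  (pr) => pr.id, // placeholder callback; the real table uses GetCommitsCount and friends
)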
|
src/Report.Definitions.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 0.9023807048797607
},
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { ConfigurationCategory } from '../Report.Definitions'\nimport { IPullRequest } from './PullRequestTypes'\nexport type PullRequestCallback = (pr: IPullRequest) => string | number\nexport interface IReportConfigInfo {\n Description: string\n PresentationValue: string | number\n Value: string | number\n ConfigurationName: string\n ConfigurationCategory: ConfigurationCategory",
"score": 0.8530457019805908
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " new ConfigurationInfo(\n 'Pull Request Report',\n 0,\n 0,\n 'ReportTitle',\n 'Pull Request Report',\n ConfigurationCategory.ReportGeneratorValue,\n ),\n () => 0,\n ),",
"score": 0.830070436000824
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " new ReportConfigurationEntry(\n 'commits',\n new ConfigurationInfo(\n 'Number of commits',\n 0,\n 0,\n 'ShowNumberOfCommits',\n 'yes',\n ConfigurationCategory.StaticMeasures,\n ),",
"score": 0.8272011280059814
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " GetCommitsCount,\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'reviews',\n new ConfigurationInfo(\n 'Number of reviews',\n 0,\n 0,",
"score": 0.8128371238708496
}
] |
typescript
|
PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {
|
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
event_instance: filteredEvents[i].event_instance,
})
} else {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createdAt as a Date from its string representation
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime
|
+= new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
|
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenters from comment events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique committers from commit events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
|
src/Report.Calculation.ts
|
philips-software-pull-request-report-action-3390d78
|
[
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " startedAt: string\n completedAt: string\n conclusion: string\n status: string\n name: string\n detailsUrl: string\n }\n const statusCheck = new StatusCheck()\n statusCheck.workflowName = jsonObject['workflowName']\n statusCheck.startedAt = jsonObject['startedAt']",
"score": 0.7855672240257263
},
{
"filename": "src/Interfaces/PullRequestTypes.ts",
"retrieved_chunk": " committer: string\n authorDate: string\n commitDate: string\n commitHeader: string\n commitBody: string\n commitId: string\n}\nexport interface IStatusCheck {\n workflowName: string\n startedAt: string",
"score": 0.7623120546340942
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " statusCheck.completedAt = jsonObject['completedAt']\n statusCheck.conclusion = jsonObject['conclusion']\n statusCheck.status = jsonObject['status']\n statusCheck.name = jsonObject['name']\n return statusCheck\n }\n}\nexport class PullRequest implements IPullRequest {\n public id = 0\n public title = ''",
"score": 0.7532986998558044
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " // eslint-disable-next-line @typescript-eslint/unbound-method\n commit.authors = ParseArrayOfType<ICommitAuthor>(jsonObject['authors'], CommitAuthor.CreateFromJson)\n commit.commitDate = jsonObject['committedDate']\n commit.commitHeader = jsonObject['messageHeadline']\n commit.commitBody = jsonObject['messageBody']\n commit.commitId = jsonObject['oid']\n return commit\n }\n}\nexport class StatusCheck implements IStatusCheck {",
"score": 0.744985818862915
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " public workflowName = ''\n public startedAt = ''\n public completedAt = ''\n public conclusion = ''\n public status = ''\n public name = ''\n public detailsUrl = ''\n public static CreateFromJson(json: unknown): IStatusCheck {\n const jsonObject = json as {\n workflowName: string",
"score": 0.7446524500846863
}
] |
typescript
|
+= new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
|
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: johndoe@example.com
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router.get
|
('/', protect, restrictTo('admin'), fetchUsers);
|
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves, not the admin
router.delete('/:id', restrictTo('user'), deleteUser);
export default router;
|
src/modules/auth/controller/users.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.8685092926025391
},
{
"filename": "src/modules/auth/service/index.ts",
"retrieved_chunk": " const refresh = refreshToken(user);\n return res.status(200).json({ status: 'sucess', refresh });\n}\nexport async function fetchUsers(req: Request, res: Response) {\n const body = req.body;\n console.log({ body });\n try {\n const users = await User.find();\n return res.status(200).json({ message: 'sucessfully fetch users', data: users });\n } catch (error: any) {",
"score": 0.8413118720054626
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/signup', signup);\n/**\n * @swagger\n * /api/v1/auth/login:\n * post:",
"score": 0.8384833931922913
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * \"401\":\n * description: Invalid or expired token or refresh token was already used\n */\nrouter.post('/refresh', refreshMiddleware, refresh);\n/**\n * @swagger\n * /api/v1/auth/me:\n * post:\n * summary: Get user profile\n * tags: [Auth]",
"score": 0.8336937427520752
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * description: The authenticated user.\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/login', login);\n/**\n * @swagger\n * /api/v1/auth/refresh:",
"score": 0.8304265737533569
}
] |
typescript
|
('/', protect, restrictTo('admin'), fetchUsers);
|
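`restrictTo` guards the routes above but its implementation never appears in this dump; a plausible role-check sketch, assuming `req.user.role` is populated by the auth middleware as the refresh chunk suggests:

```typescript
// Hypothetical sketch of restrictTo (the real middleware source is not shown).
import { NextFunction, Request, Response } from 'express';
import { AppError } from '../utils/appError';

export const restrictTo =
  (...roles: string[]) =>
  (req: Request, res: Response, next: NextFunction) => {
    // protect/refresh middleware attach req.user earlier in the chain.
    const role = (req as { user?: { role?: string } }).user?.role;
    if (!role || !roles.includes(role)) {
      return next(new AppError('You do not have permission to perform this action', 403));
    }
    next();
  };
```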
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: johndoe@example.com
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: johndoe@example.com
* password: password123
*/
import express from 'express';
import { getMe, login, refresh, signup } from '../service';
import { refreshMiddleware } from '../../../middleware/refresh';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/auth/signup:
* post:
* summary: Creates an account
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/SignupRequest'
* responses:
* "200":
* description: The created user.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/User'
*/
router.post('/signup', signup);
/**
* @swagger
* /api/v1/auth/login:
* post:
* summary: Login User
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/LoginRequest'
* responses:
* "200":
* description: The authenticated user.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/User'
*/
router.post('/login', login);
/**
* @swagger
* /api/v1/auth/refresh:
* post:
* summary: Refreshes the access token
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - refresh
* properties:
* refresh:
* type: string
* description: Refresh token
* example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY0NGYwMjg0MWRmNGJlYzliOWI3ZjlhYSIsImlhdCI6MTY4Mjg5OTU4OCwiZXhwIjoxNjgzMDcyMzg4fQ.Bt2kzyxyUEtUy9pLvr0zSzpI8_xTaM6KulO2mwYztbQ
* responses:
* "200":
* description: The new access token
* content:
* application/json:
* schema:
* type: object
* properties:
* accessToken:
* type: string
* description: Access token
* example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMn0.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c
* "400":
* description: Invalid request or refresh token is not present
* "401":
* description: Invalid or expired token or refresh token was already used
*/
router.post('/refresh', refreshMiddleware, refresh);
/**
* @swagger
* /api/v1/auth/me:
* post:
* summary: Get user profile
* tags: [Auth]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: The user profile
* "401":
* description: Unauthorized
*/
router.post('
|
/me', protect, getMe);
|
export default router;
|
src/modules/auth/controller/index.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/modules/auth/service/index.ts",
"retrieved_chunk": "export const getMe = catchAsync(async (req, res) => {\n const user = req.user;\n // 3) If everything ok, send token to client\n res.status(200).json({ message: 'user sucessfully fetched!', user });\n});\nexport function logout(req: Request, res: Response) {\n res.cookie('jwt', 'loggedout', {\n expires: new Date(Date.now() + 10 * 1000),\n httpOnly: true,\n });",
"score": 0.8428131341934204
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * items:\n * $ref: '#/components/schemas/User'\n * \"401\":\n * description: Unauthorized\n */\nrouter.get('/', protect, restrictTo('admin'), fetchUsers);\n/**\n * @swagger\n * /api/v1/users/{id}:\n * delete:",
"score": 0.8236273527145386
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * description: Invalid request parameters\n * '401':\n * description: Unauthorized request\n */\nrouter.post('/transfer', protect, transferFund);\nexport default router;",
"score": 0.8212709426879883
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " */\nimport express from 'express';\nimport { deleteUser, fetchUsers } from '../service';\nimport { protect, restrictTo } from '../../../middleware';\nconst router = express.Router();\n/**\n * @swagger\n * /api/v1/users:\n * get:\n * summary: Retrieve all users",
"score": 0.8153854608535767
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": "router.delete('/:id', restrictTo('user'), deleteUser);\nexport default router;",
"score": 0.8123335242271423
}
] |
typescript
|
/me', protect, getMe);
|
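The `/refresh` route relies on `refreshMiddleware`, of which only the `req.user` assignment and the catch block survive in the retrieved chunks; a sketch filling in the assumed token-verification step:

```typescript
// Hypothetical sketch of refreshMiddleware; only the req.user assignment and the
// catch block are visible in the chunks, the rest is assumed.
import { NextFunction, Request, Response } from 'express';
import { JwtPayload, verify } from 'jsonwebtoken';
import { AppError } from '../utils/appError';

export async function refreshMiddleware(req: Request, res: Response, next: NextFunction) {
  const token = req.body.refresh;
  if (!token) {
    return next(new AppError('Refresh token is not present', 400));
  }
  try {
    // Verify against the refresh secret used by the refreshToken signer.
    const decoded = verify(token, process.env.JWT_KEY_REFRESH as string) as JwtPayload;
    (req as { user?: unknown }).user = {
      email: decoded.email,
      name: decoded.name,
      role: decoded.role,
      token,
    };
    next();
    return;
  } catch (err) {
    console.log({ err });
    return next(new AppError('Invalid or expired token', 401));
  }
}
```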
import { sign } from 'jsonwebtoken';
import { IUser } from '../types';
import { Request, Response } from 'express';
import User from '../model';
import { AppError } from '../../../utils/appError';
import { catchAsync } from '../../../utils/catchAsync';
import redisService from '../../../utils/redis';
const accessToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_ACCESS, role: user.role },
process.env.JWT_KEY_SECRET as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const refreshToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_REFRESH, role: user.role },
process.env.JWT_KEY_REFRESH as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const createSendToken = (user: IUser, statusCode: number, req: Request, res: Response) => {
  const access = accessToken(user);
const refresh = refreshToken(user);
// Remove password from output
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { name, email, role, ...otherUserData } = user;
res.status(statusCode).json({
status: 'success',
    access,
refresh,
data: {
name,
email,
role,
},
});
};
export const signup = catchAsync(async (req, res) => {
const newUser = await User.create({
name: req.body.name,
email: req.body.email,
password: req.body.password,
});
createSendToken(newUser, 201, req, res);
});
export const login = catchAsync(async
|
(req, res, next) => {
|
const { email, password } = req.body;
// 1) Check if email and password exist
if (!email || !password) {
return next(new AppError('Please provide email and password!', 400));
}
// 2) Check if user exists && password is correct
const user: any = await User.findOne({ email }).select('+password');
if (!user || !(await user.correctPassword(password, user.password))) {
return next(new AppError('Incorrect email or password', 401));
}
// 3) If everything ok, send token to client
createSendToken(user, 200, req, res);
});
export const getMe = catchAsync(async (req, res) => {
const user = req.user;
  // Send the authenticated user's profile back to the client
  res.status(200).json({ message: 'user successfully fetched!', user });
});
export function logout(req: Request, res: Response) {
res.cookie('jwt', 'loggedout', {
expires: new Date(Date.now() + 10 * 1000),
httpOnly: true,
});
res.status(200).json({ status: 'success' });
}
export async function refresh(req: Request, res: Response) {
const user: any = req.user;
await redisService.set({
key: user?.token,
value: '1',
timeType: 'EX',
time: parseInt(process.env.JWT_REFRESH_TIME || '', 10),
});
const refresh = refreshToken(user);
  return res.status(200).json({ status: 'success', refresh });
}
export async function fetchUsers(req: Request, res: Response) {
const body = req.body;
console.log({ body });
try {
const users = await User.find();
    return res.status(200).json({ message: 'successfully fetched users', data: users });
  } catch (error: any) {
    // Surface the failure to the client with a server-error status.
    return res.status(500).json({ status: 'error', message: error.message });
}
}
export async function deleteUser(req: Request, res: Response) {
const id = req.params.id;
try {
await User.deleteOne({ _id: id });
    return res.status(200).json({ message: 'successfully deleted user' });
  } catch (error: any) {
    // Surface the failure to the client with a server-error status.
    return res.status(500).json({ status: 'error', message: error.message });
}
}
|
src/modules/auth/service/index.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/middleware/protect.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\nimport { JwtPayload, verify } from 'jsonwebtoken';\nimport { AppError } from '../utils/appError';\nimport { catchAsync } from '../utils/catchAsync';\nimport User from '../modules/auth/model';\nexport const protect = catchAsync(async (req: Request, res: Response, next: NextFunction) => {\n // 1) Getting token and check of it's there\n let token;\n if (req.headers.authorization && req.headers.authorization.startsWith('Bearer')) {\n token = req.headers.authorization.split(' ')[1];",
"score": 0.8590115308761597
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": "import express from 'express';\nimport { getMe, login, refresh, signup } from '../service';\nimport { refreshMiddleware } from '../../../middleware/refresh';\nimport { protect } from '../../../middleware';\nconst router = express.Router();\n/**\n * @swagger\n * /api/v1/auth/signup:\n * post:\n * summary: Creates an account",
"score": 0.8456214070320129
},
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": " req.user = {\n email: decoded.email,\n name: decoded.name,\n role: decoded.role,\n token,\n };\n next();\n return;\n } catch (err) {\n console.log({ err });",
"score": 0.8444671630859375
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/signup', signup);\n/**\n * @swagger\n * /api/v1/auth/login:\n * post:",
"score": 0.84010249376297
},
{
"filename": "src/middleware/isLoggedIn.ts",
"retrieved_chunk": "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport { NextFunction, Request, Response } from 'express';\nimport jwt from 'jsonwebtoken';\nimport User from '../modules/auth/model';\n// Only for rendered pages, no errors!\nexport async function isLoggedIn(req: Request, res: Response, next: NextFunction) {\n if (req.cookies.jwt) {\n try {\n // 1) verify token\n const decoded: any = await jwt.verify(req.cookies.jwt, process.env.JWT_KEY_SECRET as string);",
"score": 0.8367182016372681
}
] |
typescript
|
(req, res, next) => {
|
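Every service handler above is wrapped in `catchAsync`, imported from src/utils/catchAsync but never shown in this dump; a minimal sketch, assuming the conventional promise-forwarding wrapper:

```typescript
// Hypothetical sketch of the catchAsync utility (src/utils/catchAsync is not shown).
import { NextFunction, Request, RequestHandler, Response } from 'express';

type AsyncHandler = (req: Request, res: Response, next: NextFunction) => Promise<unknown>;

export const catchAsync = (fn: AsyncHandler): RequestHandler =>
  (req, res, next) => {
    // Forward any rejection to Express's error-handling middleware.
    fn(req, res, next).catch(next);
  };
```

Wrapping handlers this way is what lets `signup` and `login` above omit their own try/catch blocks.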
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: johndoe@example.com
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router.get('/', protect, restrictTo('admin'), fetchUsers);
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves, not the admin
router
|
.delete('/:id', restrictTo('user'), deleteUser);
|
export default router;
|
src/modules/auth/controller/users.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/modules/auth/service/index.ts",
"retrieved_chunk": " new AppError(error.message, 201);\n }\n}\nexport async function deleteUser(req: Request, res: Response) {\n const id = req.params.id;\n try {\n await User.deleteOne({ _id: id });\n return res.status(200).json({ message: 'sucessfully deleted users' });\n } catch (error: any) {\n new AppError(error.message, 201);",
"score": 0.8608141541481018
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 0.8123123645782471
},
{
"filename": "src/modules/auth/service/index.ts",
"retrieved_chunk": "export const getMe = catchAsync(async (req, res) => {\n const user = req.user;\n // 3) If everything ok, send token to client\n res.status(200).json({ message: 'user sucessfully fetched!', user });\n});\nexport function logout(req: Request, res: Response) {\n res.cookie('jwt', 'loggedout', {\n expires: new Date(Date.now() + 10 * 1000),\n httpOnly: true,\n });",
"score": 0.7827121019363403
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * \"401\":\n * description: Invalid or expired token or refresh token was already used\n */\nrouter.post('/refresh', refreshMiddleware, refresh);\n/**\n * @swagger\n * /api/v1/auth/me:\n * post:\n * summary: Get user profile\n * tags: [Auth]",
"score": 0.7826315760612488
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/signup', signup);\n/**\n * @swagger\n * /api/v1/auth/login:\n * post:",
"score": 0.7820159196853638
}
] |
typescript
|
.delete('/:id', restrictTo('user'), deleteUser);
|
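Both controllers raise `AppError` from src/utils/appError, which is absent from the dump; the conventional operational-error class it presumably resembles:

```typescript
// Hypothetical sketch of AppError (src/utils/appError is not included in this dump).
export class AppError extends Error {
  statusCode: number;
  status: string;
  isOperational: boolean;

  constructor(message: string, statusCode: number) {
    super(message);
    this.statusCode = statusCode;
    // 4xx codes are client failures, everything else is a server error.
    this.status = `${statusCode}`.startsWith('4') ? 'fail' : 'error';
    this.isOperational = true;
    Error.captureStackTrace(this, this.constructor);
  }
}
```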
import { sign } from 'jsonwebtoken';
import { IUser } from '../types';
import { Request, Response } from 'express';
import User from '../model';
import { AppError } from '../../../utils/appError';
import { catchAsync } from '../../../utils/catchAsync';
import redisService from '../../../utils/redis';
const accessToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_ACCESS, role: user.role },
process.env.JWT_KEY_SECRET as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const refreshToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_REFRESH, role: user.role },
process.env.JWT_KEY_REFRESH as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const createSendToken = (user: IUser, statusCode: number, req: Request, res: Response) => {
  const access = accessToken(user);
const refresh = refreshToken(user);
// Remove password from output
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { name, email, role, ...otherUserData } = user;
res.status(statusCode).json({
status: 'success',
    access,
refresh,
data: {
name,
email,
role,
},
});
};
|
export const signup = catchAsync(async (req, res) => {
|
const newUser = await User.create({
name: req.body.name,
email: req.body.email,
password: req.body.password,
});
createSendToken(newUser, 201, req, res);
});
export const login = catchAsync(async (req, res, next) => {
const { email, password } = req.body;
// 1) Check if email and password exist
if (!email || !password) {
return next(new AppError('Please provide email and password!', 400));
}
// 2) Check if user exists && password is correct
const user: any = await User.findOne({ email }).select('+password');
if (!user || !(await user.correctPassword(password, user.password))) {
return next(new AppError('Incorrect email or password', 401));
}
// 3) If everything ok, send token to client
createSendToken(user, 200, req, res);
});
export const getMe = catchAsync(async (req, res) => {
const user = req.user;
  // Send the authenticated user's profile back to the client
  res.status(200).json({ message: 'user successfully fetched!', user });
});
export function logout(req: Request, res: Response) {
res.cookie('jwt', 'loggedout', {
expires: new Date(Date.now() + 10 * 1000),
httpOnly: true,
});
res.status(200).json({ status: 'success' });
}
export async function refresh(req: Request, res: Response) {
const user: any = req.user;
await redisService.set({
key: user?.token,
value: '1',
timeType: 'EX',
time: parseInt(process.env.JWT_REFRESH_TIME || '', 10),
});
const refresh = refreshToken(user);
  return res.status(200).json({ status: 'success', refresh });
}
export async function fetchUsers(req: Request, res: Response) {
const body = req.body;
console.log({ body });
try {
const users = await User.find();
    return res.status(200).json({ message: 'successfully fetched users', data: users });
  } catch (error: any) {
    // Surface the failure to the client with a server-error status.
    return res.status(500).json({ status: 'error', message: error.message });
}
}
export async function deleteUser(req: Request, res: Response) {
const id = req.params.id;
try {
await User.deleteOne({ _id: id });
    return res.status(200).json({ message: 'successfully deleted user' });
  } catch (error: any) {
    // Surface the failure to the client with a server-error status.
    return res.status(500).json({ status: 'error', message: error.message });
}
}
|
src/modules/auth/service/index.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": "import express from 'express';\nimport { getMe, login, refresh, signup } from '../service';\nimport { refreshMiddleware } from '../../../middleware/refresh';\nimport { protect } from '../../../middleware';\nconst router = express.Router();\n/**\n * @swagger\n * /api/v1/auth/signup:\n * post:\n * summary: Creates an account",
"score": 0.8481923341751099
},
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": " req.user = {\n email: decoded.email,\n name: decoded.name,\n role: decoded.role,\n token,\n };\n next();\n return;\n } catch (err) {\n console.log({ err });",
"score": 0.8416112065315247
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": "/**\n * @swagger\n * components:\n * schemas:\n * SignupRequest:\n * type: object\n * required:\n * - email\n * - password\n * - name",
"score": 0.8190943002700806
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": "/**\n * @swagger\n * components:\n * schemas:\n * SignupRequest:\n * type: object\n * required:\n * - email\n * - password\n * - name",
"score": 0.8189831376075745
},
{
"filename": "src/middleware/protect.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\nimport { JwtPayload, verify } from 'jsonwebtoken';\nimport { AppError } from '../utils/appError';\nimport { catchAsync } from '../utils/catchAsync';\nimport User from '../modules/auth/model';\nexport const protect = catchAsync(async (req: Request, res: Response, next: NextFunction) => {\n // 1) Getting token and check of it's there\n let token;\n if (req.headers.authorization && req.headers.authorization.startsWith('Bearer')) {\n token = req.headers.authorization.split(' ')[1];",
"score": 0.817732572555542
}
] |
typescript
|
export const signup = catchAsync(async (req, res) => {
|
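`login` calls `user.correctPassword(password, user.password)`, an instance method on the model that is not shown; a sketch of the comparison it presumably performs, assuming bcryptjs:

```typescript
// Hypothetical sketch of the check assumed behind user.correctPassword;
// the model file is not part of this dump.
import bcrypt from 'bcryptjs';

export async function correctPassword(candidate: string, hashed: string): Promise<boolean> {
  // bcrypt re-hashes the candidate with the salt embedded in `hashed` and compares.
  return bcrypt.compare(candidate, hashed);
}
```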
import { sign } from 'jsonwebtoken';
import { IUser } from '../types';
import { Request, Response } from 'express';
import User from '../model';
import { AppError } from '../../../utils/appError';
import { catchAsync } from '../../../utils/catchAsync';
import redisService from '../../../utils/redis';
const accessToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_ACCESS, role: user.role },
process.env.JWT_KEY_SECRET as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const refreshToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_REFRESH, role: user.role },
process.env.JWT_KEY_REFRESH as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const createSendToken = (user: IUser, statusCode: number, req: Request, res: Response) => {
  const access = accessToken(user);
const refresh = refreshToken(user);
// Remove password from output
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { name, email, role, ...otherUserData } = user;
res.status(statusCode).json({
status: 'success',
    access,
refresh,
data: {
name,
email,
role,
},
});
};
export const signup = catchAsync(async (req, res) => {
const newUser = await User.create({
name: req.body.name,
email: req.body.email,
password: req.body.password,
});
createSendToken(newUser, 201, req, res);
});
export const login = catchAsync(async (req, res, next) => {
const { email, password } = req.body;
// 1) Check if email and password exist
if (!email || !password) {
|
return next(new AppError('Please provide email and password!', 400));
|
}
// 2) Check if user exists && password is correct
const user: any = await User.findOne({ email }).select('+password');
if (!user || !(await user.correctPassword(password, user.password))) {
return next(new AppError('Incorrect email or password', 401));
}
// 3) If everything ok, send token to client
createSendToken(user, 200, req, res);
});
export const getMe = catchAsync(async (req, res) => {
const user = req.user;
  // Send the authenticated user's profile back to the client
  res.status(200).json({ message: 'user successfully fetched!', user });
});
export function logout(req: Request, res: Response) {
res.cookie('jwt', 'loggedout', {
expires: new Date(Date.now() + 10 * 1000),
httpOnly: true,
});
res.status(200).json({ status: 'success' });
}
export async function refresh(req: Request, res: Response) {
const user: any = req.user;
await redisService.set({
key: user?.token,
value: '1',
timeType: 'EX',
time: parseInt(process.env.JWT_REFRESH_TIME || '', 10),
});
const refresh = refreshToken(user);
  return res.status(200).json({ status: 'success', refresh });
}
export async function fetchUsers(req: Request, res: Response) {
const body = req.body;
console.log({ body });
try {
const users = await User.find();
    return res.status(200).json({ message: 'successfully fetched users', data: users });
  } catch (error: any) {
    // Surface the failure to the client with a server-error status.
    return res.status(500).json({ status: 'error', message: error.message });
}
}
export async function deleteUser(req: Request, res: Response) {
const id = req.params.id;
try {
await User.deleteOne({ _id: id });
    return res.status(200).json({ message: 'successfully deleted user' });
  } catch (error: any) {
    // Surface the failure to the client with a server-error status.
    return res.status(500).json({ status: 'error', message: error.message });
}
}
|
src/modules/auth/service/index.ts
|
walosha-BACKEND_DEV_TESTS-db2fcb4
|
[
{
"filename": "src/middleware/protect.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\nimport { JwtPayload, verify } from 'jsonwebtoken';\nimport { AppError } from '../utils/appError';\nimport { catchAsync } from '../utils/catchAsync';\nimport User from '../modules/auth/model';\nexport const protect = catchAsync(async (req: Request, res: Response, next: NextFunction) => {\n // 1) Getting token and check of it's there\n let token;\n if (req.headers.authorization && req.headers.authorization.startsWith('Bearer')) {\n token = req.headers.authorization.split(' ')[1];",
"score": 0.8597468137741089
},
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": " req.user = {\n email: decoded.email,\n name: decoded.name,\n role: decoded.role,\n token,\n };\n next();\n return;\n } catch (err) {\n console.log({ err });",
"score": 0.8592860698699951
},
{
"filename": "src/middleware/protect.ts",
"retrieved_chunk": " } else if (req.cookies.jwt) {\n token = req.cookies.jwt;\n }\n console.log({ token });\n if (!token) {\n return next(new AppError('You are not logged in! Please log in to get access.', 401));\n }\n // 2) Verification token\n const decoded = (await verify(token, process.env.JWT_KEY_SECRET as string)) as JwtPayload;\n console.log({ decoded });",
"score": 0.8543532490730286
},
{
"filename": "src/middleware/isLoggedIn.ts",
"retrieved_chunk": "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport { NextFunction, Request, Response } from 'express';\nimport jwt from 'jsonwebtoken';\nimport User from '../modules/auth/model';\n// Only for rendered pages, no errors!\nexport async function isLoggedIn(req: Request, res: Response, next: NextFunction) {\n if (req.cookies.jwt) {\n try {\n // 1) verify token\n const decoded: any = await jwt.verify(req.cookies.jwt, process.env.JWT_KEY_SECRET as string);",
"score": 0.8426140546798706
},
{
"filename": "src/middleware/error.ts",
"retrieved_chunk": "};\nconst handleValidationErrorDB = (err: any) => {\n const errors = Object.values(err.errors).map((el: any) => el.message);\n const message = `Invalid input data. ${errors.join('. ')}`;\n return new AppError(message, 400);\n};\nconst handleJWTError = () => new AppError('Invalid token. Please log in again!', 401);\nconst handleJWTExpiredError = () => new AppError('Your token has expired! Please log in again.', 401);\nconst sendErrorDev = (err: any, req: Request, res: Response) => {\n // A) API",
"score": 0.8388603925704956
}
] |
typescript
|
return next(new AppError('Please provide email and password!', 400));
|
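The `next(new AppError(...))` calls in this row's ground truth end up in a global error middleware; only fragments of src/middleware/error.ts appear in the chunks, so this is an assumed minimal handler:

```typescript
// Hypothetical sketch of the global error middleware that ultimately receives
// next(new AppError(...)); src/middleware/error.ts is only partially visible above.
import { NextFunction, Request, Response } from 'express';

export function globalErrorHandler(err: any, req: Request, res: Response, next: NextFunction) {
  const statusCode = err.statusCode ?? 500;
  const status = err.status ?? 'error';
  // Express recognizes error middleware by its four-argument signature.
  res.status(statusCode).json({ status, message: err.message });
}
```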