prefix stringlengths 82–32.6k | middle stringlengths 5–470 | suffix stringlengths 0–81.2k | file_path stringlengths 6–168 | repo_name stringlengths 16–77 | context listlengths 5–5 | lang stringclasses 4 values | ground_truth stringlengths 5–470 |
---|---|---|---|---|---|---|---|
/* eslint-disable @typescript-eslint/no-explicit-any */
import fs from 'fs';
import { CoverInfo, CoverInfoFunctionsDetails, CoverInfoLinesDetails } from '../types';
import parseString from 'xml2js';
import * as core from '@actions/core';
const classDetailsFromProjects = (projects: any) => {
let classDetails: any[] = [];
let packageName = null;
const parseFileObject = (fileObj: any, packageName: string) => {
if (fileObj.class) {
fileObj['class'].forEach((classObj: any) => {
classDetails = classDetails.concat({
name: classObj.$.name,
metrics: classObj.metrics[0],
fileName: fileObj.$.name,
fileMetrics: fileObj.metrics[0],
lines: fileObj.line,
packageName: packageName,
});
});
} else {
classDetails = classDetails.concat({
name: null,
metrics: null,
fileName: fileObj.$.name,
fileMetrics: fileObj.metrics[0],
lines: fileObj.line,
packageName: packageName,
});
}
};
projects.forEach((projectObj: any) => {
if (projectObj.package) {
projectObj.package.forEach((data: any) => {
if (data.$?.name) {
packageName = data.$.name;
} else {
packageName = null;
}
data.file.forEach(parseFileObject);
});
}
if (projectObj.file) {
packageName = null;
projectObj.file.forEach(parseFileObject);
}
});
return classDetails;
};
const unpackage = (projects: any): CoverInfo[] => {
const classDetails = classDetailsFromProjects(projects);
return classDetails.map((c: any) => {
|
const methodStats: CoverInfoFunctionsDetails[] = [];
|
const lineStats: CoverInfoLinesDetails[] = [];
if (c.lines) {
c.lines.forEach((l: any) => {
if (l.$.type === 'method') {
methodStats.push({
name: l.$.name,
line: Number(l.$.num),
hit: Number(l.$.count),
});
} else {
lineStats.push({
line: Number(l.$.num),
hit: Number(l.$.count),
});
}
});
}
const classCov: CoverInfo = {
title: c.name,
file: c.fileName,
functions: {
found: methodStats.length,
hit: 0,
details: methodStats,
},
lines: {
found: lineStats.length,
hit: 0,
details: lineStats,
},
branches: {
found: 0,
hit: 0,
details: [],
},
};
classCov.functions.hit = classCov.functions.details.reduce((acc, val) => {
return acc + (val.hit > 0 ? 1 : 0);
}, 0);
classCov.lines.hit = classCov.lines.details.reduce((acc, val) => {
return acc + (val.hit > 0 ? 1 : 0);
}, 0);
return classCov;
});
};
const parseContent = (xml: any): Promise<CoverInfo[]> => {
return new Promise((resolve, reject) => {
parseString.parseString(xml, (err, parseResult) => {
if (err) {
return reject(err);
}
if (!parseResult?.coverage?.project) {
return reject(new Error('invalid or missing xml content'));
}
const result = unpackage(parseResult.coverage.project);
resolve(result);
});
});
};
export const parseFile = async (file: string): Promise<CoverInfo[]> => {
return new Promise((resolve, reject) => {
if (!file || file === '') {
core.info('no clover file specified');
resolve([]);
} else {
fs.readFile(
file,
'utf8',
async (err: NodeJS.ErrnoException | null, data: string) => {
if (err) {
core.error(`failed to read file: ${file}. error: ${err.message}`);
reject(err);
} else {
try {
const info = await parseContent(data);
// console.log('====== clover ======');
// console.log(JSON.stringify(info, null, 2));
resolve(info);
} catch (error) {
core.error(`failed to parseContent. err: ${error.message}`);
reject(error);
}
}
},
);
}
});
};
|
src/parsers/clover.ts
|
aGallea-tests-coverage-report-7728fb4
|
[
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " let output: CoverInfo[] = [];\n packages.forEach((pack: any) => {\n const cov = pack.sourcefile.map((source: any) => {\n const fullPath = pack.$.name + '/' + source.$.name;\n const methods = getCounter(source, 'METHOD');\n const lines = getCounter(source, 'LINE');\n const branches = getCounter(source, 'BRANCH');\n const classCov: CoverInfo = {\n title: source.$.name,\n file: fullPath,",
"score": 0.8654701709747314
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " })[0] || {\n $: {\n covered: 0,\n missed: 0,\n },\n }\n );\n};\nconst unpackage = (report: any): CoverInfo[] => {\n const packages = report.package;",
"score": 0.8629925847053528
},
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": " const packages = coverage.packages;\n const source = coverage.sources[0].source[0];\n const classes = classesFromPackages(packages);\n return classes.map((c) => {\n const branches = extractLcovStyleBranches(c);\n const classCov: CoverInfo = {\n title: c.$.name,\n // file: c.$.filename,\n file: path.join(source, c.$.filename).replace(pwd, ''),\n functions: {",
"score": 0.8466535806655884
},
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport fs from 'fs';\nimport path from 'path';\nimport { CoverInfo, CoverInfoBranchesDetails } from '../types';\nimport parseString from 'xml2js';\nimport * as core from '@actions/core';\nconst classesFromPackages = (packages: any) => {\n const classes: any[] = [];\n packages.forEach((packages: any) => {\n packages.package.forEach((pack: any) => {",
"score": 0.8414440155029297
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " }\n const result = unpackage(parseResult.report);\n resolve(result);\n });\n });\n};\nexport const parseFile = async (file: string): Promise<CoverInfo[]> => {\n return new Promise((resolve, reject) => {\n if (!file || file === '') {\n core.info('no jacoco file specified');",
"score": 0.8413206338882446
}
] |
typescript
|
const methodStats: CoverInfoFunctionsDetails[] = [];
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import fs from 'fs';
import path from 'path';
import { CoverInfo, CoverInfoBranchesDetails } from '../types';
import parseString from 'xml2js';
import * as core from '@actions/core';
const classesFromPackages = (packages: any) => {
const classes: any[] = [];
packages.forEach((packages: any) => {
packages.package.forEach((pack: any) => {
pack.classes.forEach((c: any) => {
classes.push(...c.class);
});
});
});
return classes;
};
const extractLcovStyleBranches = (c: any) => {
const branches: CoverInfoBranchesDetails[] = [];
if (c.lines && c.lines[0].line) {
c.lines[0].line.forEach((l: any) => {
if (l.$.branch == 'true') {
const branchFraction = l.$['condition-coverage'].split(' ');
const branchStats = branchFraction[1].match(/\d+/g);
const coveredBranches = Number(branchStats[0]);
const totalBranches = Number(branchStats[1]);
const leftBranches = totalBranches - coveredBranches;
let branchNumber = 0;
for (let i = 0; i < leftBranches; i++) {
branches.push({
line: Number(l.$.number),
branch: branchNumber,
taken: 0,
});
branchNumber++;
}
for (let i = 0; i < coveredBranches; i++) {
branches.push({
line: Number(l.$.number),
branch: branchNumber,
taken: 1,
});
branchNumber++;
}
}
});
}
return branches;
};
const unpackage = (coverage: any, pwd: string): CoverInfo[] => {
const packages = coverage.packages;
const source = coverage.sources[0].source[0];
const classes = classesFromPackages(packages);
return classes.map((c) => {
const branches = extractLcovStyleBranches(c);
const classCov: CoverInfo = {
title: c.$.name,
// file: c.$.filename,
file: path.join(source, c.$.filename).replace(pwd, ''),
functions: {
found: c.methods && c.methods[0].method ? c.methods[0].method.length : 0,
hit: 0,
details:
!c.methods || !c.methods[0].method
? []
: c.methods[0].method.map((m: any) => {
return {
name: m.$.name,
line: Number(m.lines[0].line[0].$.number),
hit: Number(m.lines[0].line[0].$.hits),
};
}),
},
lines: {
found: c.lines && c.lines[0].line ? c.lines[0].line.length : 0,
hit: 0,
details:
!c.lines || !c.lines[0].line
? []
: c.lines[0].line.map((l: any) => {
return {
line: Number(l.$.number),
hit: Number(l.$.hits),
};
}),
},
branches: {
found: branches.length,
hit: branches.filter((br) => {
return br.taken > 0;
}).length,
details: branches,
},
};
classCov.
|
functions.hit = classCov.functions.details.reduce((acc: any, val: any) => {
|
return acc + (val.hit > 0 ? 1 : 0);
}, 0);
classCov.lines.hit = classCov.lines.details.reduce((acc: any, val: any) => {
return acc + (val.hit > 0 ? 1 : 0);
}, 0);
return classCov;
});
};
const parseContent = (xml: string, pwd: string): Promise<CoverInfo[]> => {
return new Promise((resolve, reject) => {
parseString.parseString(xml, (err, parseResult) => {
if (err) {
return reject(err);
}
if (!parseResult?.coverage) {
return reject(new Error('invalid or missing xml content'));
}
const result = unpackage(parseResult.coverage, pwd);
resolve(result);
});
});
};
export const parseFile = async (file: string, pwd: string): Promise<CoverInfo[]> => {
return new Promise((resolve, reject) => {
if (!file || file === '') {
core.info('no cobertura file specified');
resolve([]);
} else {
fs.readFile(
file,
'utf8',
async (err: NodeJS.ErrnoException | null, data: string) => {
if (err) {
core.error(`failed to read file: ${file}. error: ${err.message}`);
reject(err);
} else {
try {
const info = await parseContent(data, pwd);
// console.log('====== cobertura ======');
// console.log(JSON.stringify(info, null, 2));
resolve(info);
} catch (error) {
core.error(`failed to parseContent. err: ${error.message}`);
reject(error);
}
}
},
);
}
});
};
|
src/parsers/cobertura.ts
|
aGallea-tests-coverage-report-7728fb4
|
[
{
"filename": "src/parsers/clover.ts",
"retrieved_chunk": " hit: 0,\n details: lineStats,\n },\n branches: {\n found: 0,\n hit: 0,\n details: [],\n },\n };\n classCov.functions.hit = classCov.functions.details.reduce((acc, val) => {",
"score": 0.9426869750022888
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " }\n return branches;\n })\n .flat() || [],\n },\n };\n return classCov;\n });\n output = output.concat(cov);\n });",
"score": 0.8976693153381348
},
{
"filename": "src/parsers/clover.ts",
"retrieved_chunk": " const classCov: CoverInfo = {\n title: c.name,\n file: c.fileName,\n functions: {\n found: methodStats.length,\n hit: 0,\n details: methodStats,\n },\n lines: {\n found: lineStats.length,",
"score": 0.8875112533569336
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " functions: {\n found: Number(methods.$.covered) + Number(methods.$.missed),\n hit: Number(methods.$.covered),\n details: pack.class.reduce((result: any, currentClass: any) => {\n return !currentClass.method\n ? result\n : result.concat(\n currentClass.method.map((method: any) => {\n const hit = method.counter.some((counter: any) => {\n return counter.$.type === 'METHOD' && counter.$.covered === '1';",
"score": 0.8855747580528259
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " }),\n },\n branches: {\n found: Number(branches.$.covered) + Number(branches.$.missed),\n hit: Number(branches.$.covered),\n details:\n source.line\n ?.filter((l: any) => {\n return Number(l.$.mb) > 0 || Number(l.$.cb) > 0;\n })",
"score": 0.8854683637619019
}
] |
typescript
|
functions.hit = classCov.functions.details.reduce((acc: any, val: any) => {
|
import { MarkdownRenderChild, MarkdownPostProcessorContext } from 'obsidian';
import { gDrawer } from './global/drawer';
import { ChemPluginSettings } from './settings/base';
import { addBlock, removeBlock } from './global/blocks';
import { i18n } from 'src/lib/i18n';
export class SmilesBlock extends MarkdownRenderChild {
constructor(
private readonly el: HTMLElement,
private readonly markdownSource: string,
private readonly context: MarkdownPostProcessorContext,
private readonly settings: ChemPluginSettings
) {
super(el); // important
addBlock(this);
}
render() {
// TODO: rendering animation
this.el.empty();
const rows = this.markdownSource
.split('\n')
.filter((row) => row.length > 0)
.map((row) => row.trim());
if (rows.length == 1) {
const div = this.el.createDiv({ cls: 'chem-cell' });
this.renderCell(rows[0], div);
} else {
const table = this.el.createDiv({ cls: 'chem-table' });
const maxWidth = this.settings.options?.width ?? 300;
rows.forEach((row) => {
const cell = table.createDiv({ cls: 'chem-cell' });
const svgcell = this.renderCell(row, cell);
if (parseFloat(svgcell.style.width) > maxWidth)
svgcell.style.width = `${maxWidth.toString()}px`;
});
table.style.gridTemplateColumns = `repeat(auto-fill, minmax(${
this.settings.options.width?.toString() ?? '300'
}px, 1fr)`;
}
}
private renderCell = (source: string, target: HTMLElement) => {
const svg = target.createSvg('svg');
gDrawer.draw(
source,
svg,
document.body.hasClass('theme-dark') &&
!document.body.hasClass('theme-light')
? this.settings.darkTheme
|
: this.settings.lightTheme,
null,
(error: object & { name: string; message: string }) => {
|
target.empty();
const ErrorContainer = target.createEl('div');
ErrorContainer.createDiv('error-source').setText(
i18n.t('errors.source.title', { source: source })
);
ErrorContainer.createEl('br');
const ErrorInfo = ErrorContainer.createEl('details');
ErrorInfo.createEl('summary').setText(error.name);
ErrorInfo.createEl('div').setText(error.message);
ErrorContainer.style.wordBreak = `break-word`;
ErrorContainer.style.userSelect = `text`;
//TODO: in multiline block, keep the width sync with the grid setting
if (this.settings.options.scale == 0)
ErrorContainer.style.width = `${
this.settings?.imgWidth.toString() ?? '300'
}px`;
else if (
ErrorContainer.offsetWidth >
(this.settings.options?.width ?? 300)
) {
ErrorContainer.style.width = `${(
this.settings.options?.width ?? 300
).toString()}px`;
ErrorContainer.style.height = `${(
this.settings.options?.height ?? 300
).toString()}px`;
}
}
);
if (this.settings.options.scale == 0)
svg.style.width = `${this.settings.imgWidth.toString()}px`;
return svg;
};
async onload() {
this.render();
}
onunload() {
removeBlock(this); // remove from global block list
}
}
|
src/SmilesBlock.ts
|
Acylation-obsidian-chem-54b1d05
|
[
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t};\n\tprivate renderCell = (\n\t\tsource: string,\n\t\ttarget: HTMLElement,\n\t\tstyle: string\n\t) => {\n\t\tconst svg = target.createSvg('svg');\n\t\tgDrawer.draw(\n\t\t\tsource,\n\t\t\tsvg,",
"score": 0.8830245733261108
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\tstyle,\n\t\t\tnull,\n\t\t\t(error: object & { name: string; message: string }) => {\n\t\t\t\ttarget.empty();\n\t\t\t\tconst ErrorContainer = target.createEl('div');\n\t\t\t\tErrorContainer.createDiv('error-source').setText(\n\t\t\t\t\ti18n.t('errors.source.title', { source: source })\n\t\t\t\t);\n\t\t\t\tErrorContainer.createEl('br');\n\t\t\t\tconst ErrorInfo = ErrorContainer.createEl('details');",
"score": 0.8303526043891907
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "import { ChemPluginSettings } from '../settings/base';\nimport { gDrawer } from 'src/global/drawer';\nimport { i18n } from 'src/lib/i18n';\nexport class LivePreview {\n\tcontainer: HTMLDivElement;\n\tlightCard: HTMLDivElement;\n\tdarkCard: HTMLDivElement;\n\tsettings: ChemPluginSettings;\n\tconstructor(\n\t\tprivate readonly el: HTMLElement,",
"score": 0.7877915501594543
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t);\n\t\tthis.darkCard.empty();\n\t\tconst darkWidth = this.renderCell(\n\t\t\tthis.settings.sample2,\n\t\t\tthis.darkCard,\n\t\t\tthis.settings.darkTheme\n\t\t);\n\t\tif (this.settings.options.scale == 0)\n\t\t\tthis.container.style.gridTemplateColumns = `repeat(auto-fill, minmax(${\n\t\t\t\tthis.settings?.imgWidth.toString() ?? '300'",
"score": 0.7857887744903564
},
{
"filename": "src/global/drawer.ts",
"retrieved_chunk": "import { DEFAULT_SD_OPTIONS, SMILES_DRAWER_OPTIONS } from 'src/settings/base';\nimport SmilesDrawer from 'smiles-drawer';\nexport let gDrawer = new SmilesDrawer.SmiDrawer(DEFAULT_SD_OPTIONS);\nexport const setDrawer = (options: Partial<SMILES_DRAWER_OPTIONS>) => {\n\tgDrawer = new SmilesDrawer.SmiDrawer({ ...DEFAULT_SD_OPTIONS, ...options });\n};\nexport const clearDrawer = () => {\n\tgDrawer = {};\n};",
"score": 0.7838369607925415
}
] |
typescript
|
: this.settings.lightTheme,
null,
(error: object & { name: string; message: string }) => {
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import fs from 'fs';
import { CoverInfo } from '../types';
import * as core from '@actions/core';
const parseContent = (str: string): CoverInfo[] => {
const data: any[] = [];
let item: CoverInfo;
['end_of_record'].concat(str.split('\n')).forEach((line: string) => {
line = line.trim();
const allparts: string[] = line.split(':') || [];
const parts: string[] = [allparts.shift() || '', allparts.join(':')];
let lines: any[];
let fn: any;
switch (parts[0].toUpperCase()) {
case 'TN':
item.title = parts[1].trim();
break;
case 'SF':
item.file = parts.slice(1).join(':').trim();
break;
case 'FNF':
item.functions.found = Number(parts[1].trim());
break;
case 'FNH':
item.functions.hit = Number(parts[1].trim());
break;
case 'LF':
item.lines.found = Number(parts[1].trim());
break;
case 'LH':
item.lines.hit = Number(parts[1].trim());
break;
case 'DA':
lines = parts[1].split(',');
item.lines.details.push({
line: Number(lines[0]),
hit: Number(lines[1]),
});
break;
case 'FN':
fn = parts[1].split(',');
item.functions.details.push({
name: fn[1],
line: Number(fn[0]),
hit: 0,
});
break;
case 'FNDA':
fn = parts[1].split(',');
item.functions.details.some((i: any, k: any) => {
if (i.name === fn[1] && i.hit === undefined) {
item.functions.details[k].hit = Number(fn[0]);
return true;
}
});
break;
case 'BRDA':
fn = parts[1].split(',');
|
item.branches.details.push({
|
line: Number(fn[0]),
block: Number(fn[1]),
branch: Number(fn[2]),
taken: fn[3] === '-' ? 0 : Number(fn[3]),
});
break;
case 'BRF':
item.branches.found = Number(parts[1]);
break;
case 'BRH':
item.branches.hit = Number(parts[1]);
break;
}
if (line === 'end_of_record') {
if (item) {
data.push(item);
}
item = {
title: '',
file: '',
lines: {
found: 0,
hit: 0,
details: [],
},
functions: {
hit: 0,
found: 0,
details: [],
},
branches: {
hit: 0,
found: 0,
details: [],
},
};
}
});
if (!data.length) {
core.info('No lcov file content');
}
return data;
};
export function parseFile(file: string): Promise<CoverInfo[]> {
return new Promise((resolve, reject) => {
if (!file || file === '') {
core.info('no lcov file specified');
resolve([]);
} else {
fs.readFile(
file,
'utf8',
async (err: NodeJS.ErrnoException | null, data: string) => {
if (err) {
core.error(`failed to read file: ${file}. error: ${err.message}`);
reject(err);
} else {
try {
const info = parseContent(data);
// console.log('====== lcov ======');
// console.log(JSON.stringify(info, null, 2));
resolve(info);
} catch (error) {
core.error(`failed to parseContent. err: ${error.message}`);
reject(error);
}
}
},
);
}
});
}
|
src/parsers/lcov.ts
|
aGallea-tests-coverage-report-7728fb4
|
[
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": " }).length,\n details: branches,\n },\n };\n classCov.functions.hit = classCov.functions.details.reduce((acc: any, val: any) => {\n return acc + (val.hit > 0 ? 1 : 0);\n }, 0);\n classCov.lines.hit = classCov.lines.details.reduce((acc: any, val: any) => {\n return acc + (val.hit > 0 ? 1 : 0);\n }, 0);",
"score": 0.8345379829406738
},
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": " c.lines[0].line.forEach((l: any) => {\n if (l.$.branch == 'true') {\n const branchFraction = l.$['condition-coverage'].split(' ');\n const branchStats = branchFraction[1].match(/\\d+/g);\n const coveredBranches = Number(branchStats[0]);\n const totalBranches = Number(branchStats[1]);\n const leftBranches = totalBranches - coveredBranches;\n let branchNumber = 0;\n for (let i = 0; i < leftBranches; i++) {\n branches.push({",
"score": 0.8340206146240234
},
{
"filename": "src/parsers/clover.ts",
"retrieved_chunk": " hit: 0,\n details: lineStats,\n },\n branches: {\n found: 0,\n hit: 0,\n details: [],\n },\n };\n classCov.functions.hit = classCov.functions.details.reduce((acc, val) => {",
"score": 0.8269721269607544
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " .map((l: any) => {\n let branches: any[] = [];\n const count = Number(l.$.mb) + Number(l.$.cb);\n for (let i = 0; i < count; ++i) {\n branches = branches.concat({\n line: Number(l.$.nr),\n block: 0,\n branch: Number(i),\n taken: i < Number(l.$.cb) ? 1 : 0,\n });",
"score": 0.8245111107826233
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " }),\n },\n branches: {\n found: Number(branches.$.covered) + Number(branches.$.missed),\n hit: Number(branches.$.covered),\n details:\n source.line\n ?.filter((l: any) => {\n return Number(l.$.mb) > 0 || Number(l.$.cb) > 0;\n })",
"score": 0.8240817785263062
}
] |
typescript
|
item.branches.details.push({
|
import { App, PluginSettingTab, Setting, SliderComponent } from 'obsidian';
import ChemPlugin from '../main';
import {
DEFAULT_SD_OPTIONS,
SAMPLE_SMILES_1,
SAMPLE_SMILES_2,
themeList,
} from './base';
import { setDrawer } from 'src/global/drawer';
import { refreshBlocks } from 'src/global/blocks';
import { LivePreview } from './LivePreview';
import { i18n } from 'src/lib/i18n';
// Reference: https://smilesdrawer.surge.sh/playground.html
export class ChemSettingTab extends PluginSettingTab {
plugin: ChemPlugin;
constructor({ app, plugin }: { app: App; plugin: ChemPlugin }) {
super(app, plugin);
this.plugin = plugin;
}
display(): void {
const { containerEl } = this;
containerEl.empty();
const scaleSetting = new Setting(containerEl)
.setName(i18n.t('settings.scale.name'))
.setDesc(i18n.t('settings.scale.description'))
.addExtraButton((button) => {
button
.setIcon('rotate-ccw')
.setTooltip(i18n.t('settings.scale.description'))
.onClick(async () => {
this.plugin.settings.options.scale = 1;
scaleSlider.setValue(50);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
unifyBondLength();
});
});
const scaleLabel = scaleSetting.controlEl.createDiv('slider-readout');
scaleLabel.setText(
(this.plugin.settings.options.scale ?? 1.0).toFixed(2).toString()
);
const scaleSlider = new SliderComponent(scaleSetting.controlEl)
.setValue(50 * (this.plugin.settings.options.scale ?? 1.0))
.setLimits(0.0, 100, 0.5)
.onChange(async (value) => {
this.plugin.settings.options.scale = value / 50;
scaleLabel.setText((value / 50).toFixed(2).toString());
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
if (value == 0) unifyImageWidth();
else unifyBondLength();
});
const widthSettings = new Setting(containerEl);
new Setting(containerEl)
.setName(i18n.t('settings.theme.light.name'))
.setDesc(i18n.t('settings.theme.light.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.lightTheme)
.onChange(async (value) => {
this.plugin.settings.lightTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.theme.dark.name'))
.setDesc(i18n.t('settings.theme.dark.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.darkTheme)
.onChange(async (value) => {
this.plugin.settings.darkTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.preview.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.preview.sample.name'))
.setDesc(i18n.t('settings.preview.sample.description'))
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_1)
.setValue(this.plugin.settings.sample1)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_1;
}
this.plugin.settings.sample1 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
)
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_2)
.setValue(this.plugin.settings.sample2)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_2;
}
this.plugin.settings.sample2 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
const preview = new LivePreview(containerEl, this.plugin.settings);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.advanced.compact-drawing.name'))
.setDesc(i18n.t('settings.advanced.compact-drawing.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.compactDrawing ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.compactDrawing = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.terminal-carbons.name'))
.setDesc(i18n.t('settings.advanced.terminal-carbons.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.terminalCarbons ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.terminalCarbons = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
const onSettingsChange = () => {
|
preview.updateSettings(this.plugin.settings);
|
preview.render();
};
const unifyBondLength = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) =>
text
.setValue(
this.plugin.settings.options.width?.toString() ??
'300'
)
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.options.width =
parseInt(value);
this.plugin.settings.options.height =
parseInt(value);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
};
const unifyImageWidth = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) => {
text.setValue(
this.plugin.settings?.imgWidth.toString() ?? '300'
)
.setPlaceholder('300')
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.imgWidth = parseInt(value);
await this.plugin.saveSettings();
onSettingsChange();
});
});
};
// initialize
preview.render();
if ((this.plugin.settings.options?.scale ?? 1) == 0) unifyImageWidth();
else unifyBondLength();
}
hide(): void {
refreshBlocks();
}
}
|
src/settings/SettingTab.ts
|
Acylation-obsidian-chem-54b1d05
|
[
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tsetObserver();\n\t\tthis.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));\n\t\tthis.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);\n\t}\n\tasync onunload() {\n\t\tdetachObserver();\n\t\tclearBlocks();\n\t\tclearDrawer();\n\t}\n\tasync loadSettings() {",
"score": 0.851181149482727
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tconst candidate = Object.assign({}, await this.loadData());\n\t\tif ('version' in candidate && candidate.version == SETTINGS_VERSION)\n\t\t\tthis.settings = Object.assign({}, DEFAULT_SETTINGS, candidate);\n\t\telse\n\t\t\tthis.settings = Object.assign(\n\t\t\t\t{},\n\t\t\t\tDEFAULT_SETTINGS,\n\t\t\t\tupdateSettingsVersion(candidate)\n\t\t\t);\n\t}",
"score": 0.8384478092193604
},
{
"filename": "src/main.ts",
"retrieved_chunk": "import { setDrawer, clearDrawer } from './global/drawer';\nimport { setObserver, detachObserver } from './themeObserver';\nexport default class ChemPlugin extends Plugin {\n\tsettings: ChemPluginSettings;\n\tasync onload() {\n\t\tawait this.loadSettings();\n\t\t// this.addRibbonIcon('hexagon', 'This is Chem Plugin', () => {});\n\t\t// initialize global variables\n\t\tsetDrawer(this.settings.options);\n\t\tsetBlocks();",
"score": 0.8245044946670532
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\tasync saveSettings() {\n\t\tawait this.saveData(this.settings);\n\t}\n\tsmilesProcessor = (\n\t\tsource: string,\n\t\tel: HTMLElement,\n\t\tctx: MarkdownPostProcessorContext\n\t) => {\n\t\tctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.\n\t};",
"score": 0.8240786790847778
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\t}px, 1fr)`;\n\t\telse\n\t\t\tthis.container.style.gridTemplateColumns = `repeat(auto-fill, minmax(${(lightWidth >\n\t\t\tdarkWidth\n\t\t\t\t? lightWidth\n\t\t\t\t: darkWidth\n\t\t\t).toString()}px, 1fr)`;\n\t};\n\tupdateSettings = (argSettings: ChemPluginSettings) => {\n\t\tthis.settings = argSettings;",
"score": 0.8104616403579712
}
] |
typescript
|
preview.updateSettings(this.plugin.settings);
|
import { Plugin, MarkdownPostProcessorContext } from 'obsidian';
import {
DEFAULT_SETTINGS,
ChemPluginSettings,
SETTINGS_VERSION,
} from './settings/base';
import { ChemSettingTab } from './settings/SettingTab';
import { updateSettingsVersion } from './settings/update';
import { SmilesBlock } from './SmilesBlock';
import { setBlocks, clearBlocks } from './global/blocks';
import { setDrawer, clearDrawer } from './global/drawer';
import { setObserver, detachObserver } from './themeObserver';
export default class ChemPlugin extends Plugin {
settings: ChemPluginSettings;
async onload() {
await this.loadSettings();
// this.addRibbonIcon('hexagon', 'This is Chem Plugin', () => {});
// initialize global variables
setDrawer(this.settings.options);
setBlocks();
setObserver();
this.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));
this.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);
}
async onunload() {
detachObserver();
clearBlocks();
clearDrawer();
}
async loadSettings() {
const candidate = Object.assign({}, await this.loadData());
if ('version' in candidate && candidate.version == SETTINGS_VERSION)
this.settings = Object.assign({}, DEFAULT_SETTINGS, candidate);
else
this.settings = Object.assign(
{},
DEFAULT_SETTINGS,
updateSettingsVersion(candidate)
);
}
async saveSettings() {
await this.saveData(this.settings);
}
smilesProcessor = (
source: string,
el: HTMLElement,
ctx: MarkdownPostProcessorContext
) => {
|
ctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.
};
|
}
|
src/main.ts
|
Acylation-obsidian-chem-54b1d05
|
[
{
"filename": "src/SmilesBlock.ts",
"retrieved_chunk": "import { MarkdownRenderChild, MarkdownPostProcessorContext } from 'obsidian';\nimport { gDrawer } from './global/drawer';\nimport { ChemPluginSettings } from './settings/base';\nimport { addBlock, removeBlock } from './global/blocks';\nimport { i18n } from 'src/lib/i18n';\nexport class SmilesBlock extends MarkdownRenderChild {\n\tconstructor(\n\t\tprivate readonly el: HTMLElement,\n\t\tprivate readonly markdownSource: string,\n\t\tprivate readonly context: MarkdownPostProcessorContext,",
"score": 0.8512020111083984
},
{
"filename": "src/settings/SettingTab.ts",
"retrieved_chunk": "\t\t\t\t\t.onChange(async (value) => {\n\t\t\t\t\t\tif (value == '') {\n\t\t\t\t\t\t\tvalue = SAMPLE_SMILES_2;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tthis.plugin.settings.sample2 = value;\n\t\t\t\t\t\tawait this.plugin.saveSettings();\n\t\t\t\t\t\tonSettingsChange();\n\t\t\t\t\t})\n\t\t\t);\n\t\tconst preview = new LivePreview(containerEl, this.plugin.settings);",
"score": 0.8329126834869385
},
{
"filename": "src/settings/SettingTab.ts",
"retrieved_chunk": "\t\t\t\t\t\t}\n\t\t\t\t\t\tthis.plugin.settings.sample1 = value;\n\t\t\t\t\t\tawait this.plugin.saveSettings();\n\t\t\t\t\t\tonSettingsChange();\n\t\t\t\t\t})\n\t\t\t)\n\t\t\t.addText((text) =>\n\t\t\t\ttext\n\t\t\t\t\t.setPlaceholder(SAMPLE_SMILES_2)\n\t\t\t\t\t.setValue(this.plugin.settings.sample2)",
"score": 0.8230746984481812
},
{
"filename": "src/SmilesBlock.ts",
"retrieved_chunk": "\t\tprivate readonly settings: ChemPluginSettings\n\t) {\n\t\tsuper(el); // important\n\t\taddBlock(this);\n\t}\n\trender() {\n\t\t// TODO: rendering animation\n\t\tthis.el.empty();\n\t\tconst rows = this.markdownSource\n\t\t\t.split('\\n')",
"score": 0.8153606653213501
},
{
"filename": "src/settings/SettingTab.ts",
"retrieved_chunk": "\t\tnew Setting(containerEl)\n\t\t\t.setName(i18n.t('settings.preview.sample.name'))\n\t\t\t.setDesc(i18n.t('settings.preview.sample.description'))\n\t\t\t.addText((text) =>\n\t\t\t\ttext\n\t\t\t\t\t.setPlaceholder(SAMPLE_SMILES_1)\n\t\t\t\t\t.setValue(this.plugin.settings.sample1)\n\t\t\t\t\t.onChange(async (value) => {\n\t\t\t\t\t\tif (value == '') {\n\t\t\t\t\t\t\tvalue = SAMPLE_SMILES_1;",
"score": 0.8128829002380371
}
] |
typescript
|
ctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.
};
|
import { Plugin, MarkdownPostProcessorContext } from 'obsidian';
import {
DEFAULT_SETTINGS,
ChemPluginSettings,
SETTINGS_VERSION,
} from './settings/base';
import { ChemSettingTab } from './settings/SettingTab';
import { updateSettingsVersion } from './settings/update';
import { SmilesBlock } from './SmilesBlock';
import { setBlocks, clearBlocks } from './global/blocks';
import { setDrawer, clearDrawer } from './global/drawer';
import { setObserver, detachObserver } from './themeObserver';
export default class ChemPlugin extends Plugin {
settings: ChemPluginSettings;
async onload() {
await this.loadSettings();
// this.addRibbonIcon('hexagon', 'This is Chem Plugin', () => {});
// initialize global variables
setDrawer(this.settings.options);
setBlocks();
setObserver();
this.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));
this.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);
}
async onunload() {
detachObserver();
clearBlocks();
clearDrawer();
}
async loadSettings() {
const candidate = Object.assign({}, await this.loadData());
if ('version' in candidate && candidate.version == SETTINGS_VERSION)
this.settings = Object.assign({}, DEFAULT_SETTINGS, candidate);
else
this.settings = Object.assign(
{},
DEFAULT_SETTINGS,
updateSettingsVersion(candidate)
);
}
async saveSettings() {
await this.saveData(this.settings);
}
smilesProcessor = (
source: string,
el: HTMLElement,
ctx: MarkdownPostProcessorContext
) => {
ctx.addChild(
|
new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.
};
|
}
|
src/main.ts
|
Acylation-obsidian-chem-54b1d05
|
[
{
"filename": "src/SmilesBlock.ts",
"retrieved_chunk": "import { MarkdownRenderChild, MarkdownPostProcessorContext } from 'obsidian';\nimport { gDrawer } from './global/drawer';\nimport { ChemPluginSettings } from './settings/base';\nimport { addBlock, removeBlock } from './global/blocks';\nimport { i18n } from 'src/lib/i18n';\nexport class SmilesBlock extends MarkdownRenderChild {\n\tconstructor(\n\t\tprivate readonly el: HTMLElement,\n\t\tprivate readonly markdownSource: string,\n\t\tprivate readonly context: MarkdownPostProcessorContext,",
"score": 0.8736171722412109
},
{
"filename": "src/settings/SettingTab.ts",
"retrieved_chunk": "\t\t\t\t\t.onChange(async (value) => {\n\t\t\t\t\t\tif (value == '') {\n\t\t\t\t\t\t\tvalue = SAMPLE_SMILES_2;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tthis.plugin.settings.sample2 = value;\n\t\t\t\t\t\tawait this.plugin.saveSettings();\n\t\t\t\t\t\tonSettingsChange();\n\t\t\t\t\t})\n\t\t\t);\n\t\tconst preview = new LivePreview(containerEl, this.plugin.settings);",
"score": 0.8237658739089966
},
{
"filename": "src/SmilesBlock.ts",
"retrieved_chunk": "\t\tprivate readonly settings: ChemPluginSettings\n\t) {\n\t\tsuper(el); // important\n\t\taddBlock(this);\n\t}\n\trender() {\n\t\t// TODO: rendering animation\n\t\tthis.el.empty();\n\t\tconst rows = this.markdownSource\n\t\t\t.split('\\n')",
"score": 0.8210179805755615
},
{
"filename": "src/settings/SettingTab.ts",
"retrieved_chunk": "\t\t\t\t\t\t}\n\t\t\t\t\t\tthis.plugin.settings.sample1 = value;\n\t\t\t\t\t\tawait this.plugin.saveSettings();\n\t\t\t\t\t\tonSettingsChange();\n\t\t\t\t\t})\n\t\t\t)\n\t\t\t.addText((text) =>\n\t\t\t\ttext\n\t\t\t\t\t.setPlaceholder(SAMPLE_SMILES_2)\n\t\t\t\t\t.setValue(this.plugin.settings.sample2)",
"score": 0.8134797811508179
},
{
"filename": "src/settings/SettingTab.ts",
"retrieved_chunk": "\t\tnew Setting(containerEl)\n\t\t\t.setName(i18n.t('settings.preview.sample.name'))\n\t\t\t.setDesc(i18n.t('settings.preview.sample.description'))\n\t\t\t.addText((text) =>\n\t\t\t\ttext\n\t\t\t\t\t.setPlaceholder(SAMPLE_SMILES_1)\n\t\t\t\t\t.setValue(this.plugin.settings.sample1)\n\t\t\t\t\t.onChange(async (value) => {\n\t\t\t\t\t\tif (value == '') {\n\t\t\t\t\t\t\tvalue = SAMPLE_SMILES_1;",
"score": 0.7981515526771545
}
] |
typescript
|
new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.
};
|
import { App, PluginSettingTab, Setting, SliderComponent } from 'obsidian';
import ChemPlugin from '../main';
import {
DEFAULT_SD_OPTIONS,
SAMPLE_SMILES_1,
SAMPLE_SMILES_2,
themeList,
} from './base';
import { setDrawer } from 'src/global/drawer';
import { refreshBlocks } from 'src/global/blocks';
import { LivePreview } from './LivePreview';
import { i18n } from 'src/lib/i18n';
// Reference: https://smilesdrawer.surge.sh/playground.html
export class ChemSettingTab extends PluginSettingTab {
plugin: ChemPlugin;
constructor({ app, plugin }: { app: App; plugin: ChemPlugin }) {
super(app, plugin);
this.plugin = plugin;
}
display(): void {
const { containerEl } = this;
containerEl.empty();
const scaleSetting = new Setting(containerEl)
.setName(i18n.t('settings.scale.name'))
.setDesc(i18n.t('settings.scale.description'))
.addExtraButton((button) => {
button
.setIcon('rotate-ccw')
.setTooltip(i18n.t('settings.scale.description'))
.onClick(async () => {
this.plugin.settings.options.scale = 1;
scaleSlider.setValue(50);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
unifyBondLength();
});
});
const scaleLabel = scaleSetting.controlEl.createDiv('slider-readout');
scaleLabel.setText(
(this.plugin.settings.options.scale ?? 1.0).toFixed(2).toString()
);
const scaleSlider = new SliderComponent(scaleSetting.controlEl)
.setValue(50 * (this.plugin.settings.options.scale ?? 1.0))
.setLimits(0.0, 100, 0.5)
.onChange(async (value) => {
this.plugin.settings.options.scale = value / 50;
scaleLabel.setText((value / 50).toFixed(2).toString());
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
if (value == 0) unifyImageWidth();
else unifyBondLength();
});
const widthSettings = new Setting(containerEl);
new Setting(containerEl)
.setName(i18n.t('settings.theme.light.name'))
.setDesc(i18n.t('settings.theme.light.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.lightTheme)
.onChange(async (value) => {
this.plugin.settings.lightTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.theme.dark.name'))
.setDesc(i18n.t('settings.theme.dark.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.darkTheme)
.onChange(async (value) => {
this.plugin.settings.darkTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.preview.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.preview.sample.name'))
.setDesc(i18n.t('settings.preview.sample.description'))
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_1)
.setValue(this.plugin.settings.sample1)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_1;
}
this.plugin.settings.sample1 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
)
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_2)
.setValue(this.plugin.settings.sample2)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_2;
}
this.plugin.settings.sample2 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
|
const preview = new LivePreview(containerEl, this.plugin.settings);
|
new Setting(containerEl)
.setName(i18n.t('settings.advanced.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.advanced.compact-drawing.name'))
.setDesc(i18n.t('settings.advanced.compact-drawing.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.compactDrawing ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.compactDrawing = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.terminal-carbons.name'))
.setDesc(i18n.t('settings.advanced.terminal-carbons.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.terminalCarbons ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.terminalCarbons = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
const onSettingsChange = () => {
preview.updateSettings(this.plugin.settings);
preview.render();
};
const unifyBondLength = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) =>
text
.setValue(
this.plugin.settings.options.width?.toString() ??
'300'
)
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.options.width =
parseInt(value);
this.plugin.settings.options.height =
parseInt(value);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
};
const unifyImageWidth = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) => {
text.setValue(
this.plugin.settings?.imgWidth.toString() ?? '300'
)
.setPlaceholder('300')
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.imgWidth = parseInt(value);
await this.plugin.saveSettings();
onSettingsChange();
});
});
};
// initialize
preview.render();
if ((this.plugin.settings.options?.scale ?? 1) == 0) unifyImageWidth();
else unifyBondLength();
}
hide(): void {
refreshBlocks();
}
}
|
src/settings/SettingTab.ts
|
Acylation-obsidian-chem-54b1d05
|
[
{
"filename": "src/main.ts",
"retrieved_chunk": "\tasync saveSettings() {\n\t\tawait this.saveData(this.settings);\n\t}\n\tsmilesProcessor = (\n\t\tsource: string,\n\t\tel: HTMLElement,\n\t\tctx: MarkdownPostProcessorContext\n\t) => {\n\t\tctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.\n\t};",
"score": 0.8329126834869385
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "import { ChemPluginSettings } from '../settings/base';\nimport { gDrawer } from 'src/global/drawer';\nimport { i18n } from 'src/lib/i18n';\nexport class LivePreview {\n\tcontainer: HTMLDivElement;\n\tlightCard: HTMLDivElement;\n\tdarkCard: HTMLDivElement;\n\tsettings: ChemPluginSettings;\n\tconstructor(\n\t\tprivate readonly el: HTMLElement,",
"score": 0.8307794332504272
},
{
"filename": "src/settings/base.ts",
"retrieved_chunk": "\tsample1: string;\n\tsample2: string;\n\timgWidth: number;\n\toptions: Partial<SMILES_DRAWER_OPTIONS>;\n}\nexport const DEFAULT_SETTINGS: ChemPluginSettings = {\n\tversion: SETTINGS_VERSION,\n\tdarkTheme: 'dark',\n\tlightTheme: 'light',\n\tsample1: SAMPLE_SMILES_1,",
"score": 0.8091875314712524
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tsetObserver();\n\t\tthis.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));\n\t\tthis.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);\n\t}\n\tasync onunload() {\n\t\tdetachObserver();\n\t\tclearBlocks();\n\t\tclearDrawer();\n\t}\n\tasync loadSettings() {",
"score": 0.8062631487846375
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\t}px, 1fr)`;\n\t\telse\n\t\t\tthis.container.style.gridTemplateColumns = `repeat(auto-fill, minmax(${(lightWidth >\n\t\t\tdarkWidth\n\t\t\t\t? lightWidth\n\t\t\t\t: darkWidth\n\t\t\t).toString()}px, 1fr)`;\n\t};\n\tupdateSettings = (argSettings: ChemPluginSettings) => {\n\t\tthis.settings = argSettings;",
"score": 0.8019773364067078
}
] |
typescript
|
const preview = new LivePreview(containerEl, this.plugin.settings);
|
import { App, PluginSettingTab, Setting, SliderComponent } from 'obsidian';
import ChemPlugin from '../main';
import {
DEFAULT_SD_OPTIONS,
SAMPLE_SMILES_1,
SAMPLE_SMILES_2,
themeList,
} from './base';
import { setDrawer } from 'src/global/drawer';
import { refreshBlocks } from 'src/global/blocks';
import { LivePreview } from './LivePreview';
import { i18n } from 'src/lib/i18n';
// Reference: https://smilesdrawer.surge.sh/playground.html
export class ChemSettingTab extends PluginSettingTab {
plugin: ChemPlugin;
constructor({ app, plugin }: { app: App; plugin: ChemPlugin }) {
super(app, plugin);
this.plugin = plugin;
}
display(): void {
const { containerEl } = this;
containerEl.empty();
const scaleSetting = new Setting(containerEl)
.setName(i18n.t('settings.scale.name'))
.setDesc(i18n.t('settings.scale.description'))
.addExtraButton((button) => {
button
.setIcon('rotate-ccw')
.setTooltip(i18n.t('settings.scale.description'))
.onClick(async () => {
this.plugin.settings.options.scale = 1;
scaleSlider.setValue(50);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
unifyBondLength();
});
});
const scaleLabel = scaleSetting.controlEl.createDiv('slider-readout');
scaleLabel.setText(
(this.plugin.settings.options.scale ?? 1.0).toFixed(2).toString()
);
const scaleSlider = new SliderComponent(scaleSetting.controlEl)
.setValue(50 * (this.plugin.settings.options.scale ?? 1.0))
.setLimits(0.0, 100, 0.5)
.onChange(async (value) => {
this.plugin.settings.options.scale = value / 50;
scaleLabel.setText((value / 50).toFixed(2).toString());
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
if (value == 0) unifyImageWidth();
else unifyBondLength();
});
const widthSettings = new Setting(containerEl);
new Setting(containerEl)
.setName(i18n.t('settings.theme.light.name'))
.setDesc(i18n.t('settings.theme.light.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.lightTheme)
.onChange(async (value) => {
this.plugin.settings.lightTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.theme.dark.name'))
.setDesc(i18n.t('settings.theme.dark.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.darkTheme)
.onChange(async (value) => {
this.plugin.settings.darkTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.preview.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.preview.sample.name'))
.setDesc(i18n.t('settings.preview.sample.description'))
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_1)
.setValue(this.plugin.settings.sample1)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_1;
}
this.plugin.settings.sample1 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
)
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_2)
.setValue(this.plugin.settings.sample2)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_2;
}
this.plugin.settings.sample2 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
const preview = new LivePreview(containerEl, this.plugin.settings);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.advanced.compact-drawing.name'))
.setDesc(i18n.t('settings.advanced.compact-drawing.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.compactDrawing ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.compactDrawing = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.terminal-carbons.name'))
.setDesc(i18n.t('settings.advanced.terminal-carbons.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.terminalCarbons ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.terminalCarbons = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
const onSettingsChange = () => {
preview.updateSettings(this.plugin.settings);
|
preview.render();
|
};
const unifyBondLength = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) =>
text
.setValue(
this.plugin.settings.options.width?.toString() ??
'300'
)
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.options.width =
parseInt(value);
this.plugin.settings.options.height =
parseInt(value);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
};
const unifyImageWidth = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) => {
text.setValue(
this.plugin.settings?.imgWidth.toString() ?? '300'
)
.setPlaceholder('300')
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.imgWidth = parseInt(value);
await this.plugin.saveSettings();
onSettingsChange();
});
});
};
// initialize
preview.render();
if ((this.plugin.settings.options?.scale ?? 1) == 0) unifyImageWidth();
else unifyBondLength();
}
hide(): void {
refreshBlocks();
}
}
|
src/settings/SettingTab.ts
|
Acylation-obsidian-chem-54b1d05
|
[
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tsetObserver();\n\t\tthis.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));\n\t\tthis.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);\n\t}\n\tasync onunload() {\n\t\tdetachObserver();\n\t\tclearBlocks();\n\t\tclearDrawer();\n\t}\n\tasync loadSettings() {",
"score": 0.8397533893585205
},
{
"filename": "src/main.ts",
"retrieved_chunk": "import { setDrawer, clearDrawer } from './global/drawer';\nimport { setObserver, detachObserver } from './themeObserver';\nexport default class ChemPlugin extends Plugin {\n\tsettings: ChemPluginSettings;\n\tasync onload() {\n\t\tawait this.loadSettings();\n\t\t// this.addRibbonIcon('hexagon', 'This is Chem Plugin', () => {});\n\t\t// initialize global variables\n\t\tsetDrawer(this.settings.options);\n\t\tsetBlocks();",
"score": 0.8222482204437256
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tconst candidate = Object.assign({}, await this.loadData());\n\t\tif ('version' in candidate && candidate.version == SETTINGS_VERSION)\n\t\t\tthis.settings = Object.assign({}, DEFAULT_SETTINGS, candidate);\n\t\telse\n\t\t\tthis.settings = Object.assign(\n\t\t\t\t{},\n\t\t\t\tDEFAULT_SETTINGS,\n\t\t\t\tupdateSettingsVersion(candidate)\n\t\t\t);\n\t}",
"score": 0.8210161328315735
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\t}px, 1fr)`;\n\t\telse\n\t\t\tthis.container.style.gridTemplateColumns = `repeat(auto-fill, minmax(${(lightWidth >\n\t\t\tdarkWidth\n\t\t\t\t? lightWidth\n\t\t\t\t: darkWidth\n\t\t\t).toString()}px, 1fr)`;\n\t};\n\tupdateSettings = (argSettings: ChemPluginSettings) => {\n\t\tthis.settings = argSettings;",
"score": 0.8208521604537964
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\t\t}\n\t\t\t}\n\t\t);\n\t\tif (this.settings.options.scale == 0)\n\t\t\tsvg.style.width = `${\n\t\t\t\tthis.settings?.imgWidth.toString() ?? '300'\n\t\t\t}px`;\n\t\telse if (\n\t\t\tparseFloat(svg.style.width) > (this.settings.options?.width ?? 300)\n\t\t) {",
"score": 0.8169257640838623
}
] |
typescript
|
preview.render();
|
import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'
import { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { SpanExporter } from '@opentelemetry/sdk-trace-base'
import { unwrap } from './wrap.js'
export interface OTLPExporterConfig {
url: string
headers?: Record<string, string>
}
const defaultHeaders: Record<string, string> = {
accept: 'application/json',
'content-type': 'application/json',
}
export class OTLPExporter implements SpanExporter {
private headers: Record<string, string>
private url: string
constructor(config: OTLPExporterConfig) {
this.url = config.url
this.headers = Object.assign({}, defaultHeaders, config.headers)
}
export(items: any[], resultCallback: (result: ExportResult) => void): void {
this._export(items)
.then(() => {
resultCallback({ code: ExportResultCode.SUCCESS })
})
.catch((error: ExportServiceError) => {
resultCallback({ code: ExportResultCode.FAILED, error })
})
}
private _export(items: any[]): Promise<unknown> {
return new Promise<void>((resolve, reject) => {
try {
this.send(items, resolve, reject)
} catch (e) {
reject(e)
}
})
}
send(items: any[], onSuccess: () => void, onError: (error: OTLPExporterError) => void): void {
const exportMessage = createExportTraceServiceRequest(items, true)
const body = JSON.stringify(exportMessage)
const params: RequestInit = {
method: 'POST',
headers: this.headers,
body,
}
unwrap(fetch)(this.url, params)
.then(
|
(response) => {
|
if (response.ok) {
onSuccess()
} else {
onError(new OTLPExporterError(`Exporter received a statusCode: ${response.status}`))
}
})
.catch((error) => {
onError(new OTLPExporterError(`Exception during export: ${error.toString()}`, error.code, error.stack))
})
}
async shutdown(): Promise<void> {}
}
|
src/exporter.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request)\n\t\t\tif (response.ok) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response",
"score": 0.8027036786079407
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 0.8013643026351929
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.inprogressExports.set(traceId, promise)\n\t\t\tpromise.then((result) => {\n\t\t\t\tif (result.code === ExportResultCode.FAILED) {\n\t\t\t\t\tconsole.log('Error sending spans to exporter:', result.error)\n\t\t\t\t}\n\t\t\t\tthis.action(traceId, { actionName: 'exportDone' })\n\t\t\t\tthis.inprogressExports.delete(traceId)\n\t\t\t})\n\t\t}\n\t}",
"score": 0.7990209460258484
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 0.792844295501709
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst fetchHandler: ProxyHandler<FetchFn> = {\n\t\tasync apply(target, thisArg, argArray: Parameters<FetchFn>) {\n\t\t\tconst request = argArray[0]\n\t\t\tconst config = initialiser(env, request)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst bound = target.bind(unwrap(thisArg))\n\t\t\t\treturn await api_context.with(context, executeDOFetch, undefined, bound, request, id)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 0.7906253337860107
}
] |
typescript
|
(response) => {
|
import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'
import { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { SpanExporter } from '@opentelemetry/sdk-trace-base'
import { unwrap } from './wrap.js'
export interface OTLPExporterConfig {
url: string
headers?: Record<string, string>
}
const defaultHeaders: Record<string, string> = {
accept: 'application/json',
'content-type': 'application/json',
}
export class OTLPExporter implements SpanExporter {
private headers: Record<string, string>
private url: string
constructor(config: OTLPExporterConfig) {
this.url = config.url
this.headers = Object.assign({}, defaultHeaders, config.headers)
}
export(items: any[], resultCallback: (result: ExportResult) => void): void {
this._export(items)
.then(() => {
resultCallback({ code: ExportResultCode.SUCCESS })
})
.catch((error: ExportServiceError) => {
resultCallback({ code: ExportResultCode.FAILED, error })
})
}
private _export(items: any[]): Promise<unknown> {
return new Promise<void>((resolve, reject) => {
try {
this.send(items, resolve, reject)
} catch (e) {
reject(e)
}
})
}
send(items: any[], onSuccess: () => void, onError: (error: OTLPExporterError) => void): void {
const exportMessage = createExportTraceServiceRequest(items, true)
const body = JSON.stringify(exportMessage)
const params: RequestInit = {
method: 'POST',
headers: this.headers,
body,
}
unwrap(fetch)(this.url, params)
.then((response) => {
if (response.ok) {
onSuccess()
} else {
onError(new OTLPExporterError(`Exporter received a statusCode: ${response.status}`))
}
})
.catch(
|
(error) => {
|
onError(new OTLPExporterError(`Exception during export: ${error.toString()}`, error.code, error.stack))
})
}
async shutdown(): Promise<void> {}
}
|
src/exporter.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 0.8244552612304688
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request)\n\t\t\tif (response.ok) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response",
"score": 0.8239078521728516
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst fetchHandler: ProxyHandler<FetchFn> = {\n\t\tasync apply(target, thisArg, argArray: Parameters<FetchFn>) {\n\t\t\tconst request = argArray[0]\n\t\t\tconst config = initialiser(env, request)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst bound = target.bind(unwrap(thisArg))\n\t\t\t\treturn await api_context.with(context, executeDOFetch, undefined, bound, request, id)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 0.798986554145813
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 0.788811445236206
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.inprogressExports.set(traceId, promise)\n\t\t\tpromise.then((result) => {\n\t\t\t\tif (result.code === ExportResultCode.FAILED) {\n\t\t\t\t\tconsole.log('Error sending spans to exporter:', result.error)\n\t\t\t\t}\n\t\t\t\tthis.action(traceId, { actionName: 'exportDone' })\n\t\t\t\tthis.inprogressExports.delete(traceId)\n\t\t\t})\n\t\t}\n\t}",
"score": 0.7857745289802551
}
] |
typescript
|
(error) => {
|
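// --- Editor's illustrative sketch (not a dataset row): using the OTLPExporter from src/exporter.ts above.
// The collector URL and the x-api-key header are hypothetical placeholders, not values from the repo.
import { OTLPExporter } from './exporter.js'
import { ExportResultCode } from '@opentelemetry/core'

const exporter = new OTLPExporter({
	url: 'https://collector.example.com/v1/traces', // hypothetical OTLP/HTTP endpoint
	headers: { 'x-api-key': '<token>' }, // merged over the default accept/content-type JSON headers
})

const spans: any[] = [] // spans collected by a span processor would go here
exporter.export(spans, (result) => {
	// SUCCESS when the POST returned an ok status, FAILED (carrying an OTLPExporterError) otherwise
	if (result.code === ExportResultCode.FAILED) console.log('export failed:', result.error)
})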
import { trace } from '@opentelemetry/api'
import { WorkerTracer } from '../tracer.js'
import { passthroughGet, wrap } from '../wrap.js'
type ContextAndTracker = { ctx: ExecutionContext; tracker: PromiseTracker }
type WaitUntilFn = ExecutionContext['waitUntil']
export class PromiseTracker {
_outstandingPromises: Promise<unknown>[] = []
get outstandingPromiseCount() {
return this._outstandingPromises.length
}
track(promise: Promise<unknown>): void {
this._outstandingPromises.push(promise)
}
async wait() {
await allSettledMutable(this._outstandingPromises)
}
}
function createWaitUntil(fn: WaitUntilFn, context: ExecutionContext, tracker: PromiseTracker): WaitUntilFn {
const handler: ProxyHandler<WaitUntilFn> = {
apply(target, thisArg, argArray) {
tracker.track(argArray[0])
return Reflect.apply(target, context, argArray)
},
}
return wrap(fn, handler)
}
export function proxyExecutionContext(context: ExecutionContext): ContextAndTracker {
const tracker = new PromiseTracker()
const ctx = new Proxy(context, {
get(target, prop) {
if (prop === 'waitUntil') {
const fn = Reflect.get(target, prop)
return createWaitUntil(fn, context, tracker)
} else {
return passthroughGet(target, prop)
}
},
})
return { ctx, tracker }
}
export async function exportSpans(tracker?: PromiseTracker) {
const tracer = trace.getTracer('export')
if (tracer instanceof WorkerTracer) {
await scheduler.wait(1)
if (tracker) {
await tracker.wait()
}
|
await tracer.spanProcessor.forceFlush()
} else {
|
console.error('The global tracer is not of type WorkerTracer and can not export spans')
}
}
/** Like `Promise.allSettled`, but handles modifications to the promises array */
export async function allSettledMutable(promises: Promise<unknown>[]): Promise<PromiseSettledResult<unknown>[]> {
let values: PromiseSettledResult<unknown>[]
// when the length of the array changes, there has been a nested call to waitUntil
// and we should await the promises again
do {
values = await Promise.allSettled(promises)
} while (values.length !== promises.length)
return values
}
|
src/instrumentation/common.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.inprogressExports.set(traceId, promise)\n\t\t\tpromise.then((result) => {\n\t\t\t\tif (result.code === ExportResultCode.FAILED) {\n\t\t\t\t\tconsole.log('Error sending spans to exporter:', result.error)\n\t\t\t\t}\n\t\t\t\tthis.action(traceId, { actionName: 'exportDone' })\n\t\t\t\tthis.inprogressExports.delete(traceId)\n\t\t\t})\n\t\t}\n\t}",
"score": 0.8676971793174744
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 0.8551846146583557
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\tif (!initialised) {\n\t\tinstrumentGlobalCache()\n\t\tinstrumentGlobalFetch()\n\t\tpropagation.setGlobalPropagator(new W3CTraceContextPropagator())\n\t\tconst resource = createResource(config)\n\t\tconst spanProcessor = new BatchTraceSpanProcessor()\n\t\tconst provider = new WorkerTracerProvider(spanProcessor, resource)\n\t\tprovider.register()\n\t\tinitialised = true\n\t}",
"score": 0.8329238891601562
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst workerConfig = getActiveConfig()\n\tconst acceptTraceContext =\n\t\ttypeof workerConfig.handlers.fetch.acceptTraceContext === 'function'\n\t\t\t? workerConfig.handlers.fetch.acceptTraceContext(request)\n\t\t\t: workerConfig.handlers.fetch.acceptTraceContext ?? true\n\treturn acceptTraceContext ? getParentContextFromHeaders(request.headers) : api_context.active()\n}\nexport function waitUntilTrace(fn: () => Promise<any>): Promise<void> {\n\tconst tracer = trace.getTracer('waitUntil')\n\treturn tracer.startActiveSpan('waitUntil', async (span) => {",
"score": 0.8314852118492126
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\tconst tracer = trace.getTracer('queueHandler')\n\tconst options: SpanOptions = {\n\t\tattributes: {\n\t\t\t'queue.name': batch.queue,\n\t\t},\n\t\tkind: SpanKind.CONSUMER,\n\t}\n\tconst promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {\n\t\tconst traceId = span.spanContext().traceId\n\t\tapi_context.active().setValue(traceIdSymbol, traceId)",
"score": 0.8269138336181641
}
] |
typescript
|
await tracer.spanProcessor.forceFlush()
} else {
|
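// --- Editor's illustrative sketch (not a dataset row): how proxyExecutionContext and PromiseTracker
// from src/instrumentation/common.ts above work together. The handler body is hypothetical.
import { proxyExecutionContext, exportSpans } from './instrumentation/common.js'

async function handleRequest(request: Request, env: unknown, orig_ctx: ExecutionContext): Promise<Response> {
	const { ctx, tracker } = proxyExecutionContext(orig_ctx)
	// Promises passed to the proxied waitUntil are forwarded to the real context *and* recorded...
	ctx.waitUntil(fetch('https://example.com/audit')) // hypothetical background work
	// ...so exportSpans(tracker) can await them (re-running allSettled if new ones arrive)
	// before calling forceFlush() on the WorkerTracer's span processor.
	orig_ctx.waitUntil(exportSpans(tracker))
	return new Response('ok')
}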
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
				if (typeof result === 'function') {
					// bind() returns a new bound function; return it instead of discarding it
					return result.bind(doObj)
				}
				return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger:
|
DOConstructorTrigger = {
|
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nexport function instrumentDO(doClass: DOClass, config: ConfigurationOption) {\n\tconst initialiser = createInitialiser(config)\n\treturn instrumentDOClass(doClass, initialiser)\n}\nexport { waitUntilTrace } from './instrumentation/fetch.js'",
"score": 0.8585717082023621
},
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 0.7989600896835327
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.7879672050476074
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "import { instrumentGlobalCache } from './instrumentation/cache.js'\nimport { createQueueHandler } from './instrumentation/queue.js'\nimport { DOClass, instrumentDOClass } from './instrumentation/do.js'\ntype FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>\ntype QueueHandler = ExportedHandlerQueueHandler\nexport type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig\nexport type ConfigurationOption = TraceConfig | ResolveConfigFn\nexport function isRequest(trigger: Trigger): trigger is Request {\n\treturn trigger instanceof Request\n}",
"score": 0.7862166166305542
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}\nfunction instrumentCache(cache: Cache, cacheName: string): Cache {\n\tconst handler: ProxyHandler<typeof cache> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'delete' || prop === 'match' || prop === 'put') {",
"score": 0.7843225598335266
}
] |
typescript
|
DOConstructorTrigger = {
|
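// --- Editor's illustrative sketch (not a dataset row): wrapping a Durable Object class with the
// instrumentDO helper shown in the src/sdk.ts context chunk above (which calls instrumentDOClass).
// MyDurableObject and the config fields are hypothetical; check the repo's TraceConfig for the real shape.
import { instrumentDO } from './sdk.js'

class MyDurableObject implements DurableObject {
	constructor(private state: DurableObjectState, private env: unknown) {}
	async fetch(_request: Request): Promise<Response> {
		return new Response('hello from a traced DO')
	}
}

// Assumed config shape: an exporter endpoint plus a service name.
const config: any = {
	exporter: { url: 'https://collector.example.com/v1/traces' },
	service: { name: 'my-durable-object' },
}

// The returned class proxies construct/fetch/alarm so each invocation is traced and spans are exported.
export const TracedDO = instrumentDO(MyDurableObject, config)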
import {
trace,
SpanOptions,
SpanKind,
propagation,
context as api_context,
Attributes,
Exception,
Context,
SpanStatusCode,
} from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { Initialiser, getActiveConfig, setConfig } from '../config.js'
import { wrap } from '../wrap.js'
import { instrumentEnv } from './env.js'
import { exportSpans, proxyExecutionContext } from './common.js'
import { ResolvedTraceConfig } from '../types.js'
export type IncludeTraceContextFn = (request: Request) => boolean
export interface FetcherConfig {
includeTraceContext?: boolean | IncludeTraceContextFn
}
export type AcceptTraceContextFn = (request: Request) => boolean
export interface FetchHandlerConfig {
/**
* Whether to enable context propagation for incoming requests to `fetch`.
* This enables or disables distributed tracing from W3C Trace Context headers.
* @default true
*/
acceptTraceContext?: boolean | AcceptTraceContextFn
}
type FetchHandler = ExportedHandlerFetchHandler
type FetchHandlerArgs = Parameters<FetchHandler>
export function sanitiseURL(url: string): string {
const u = new URL(url)
return `${u.protocol}//${u.host}${u.pathname}${u.search}`
}
const gatherOutgoingCfAttributes = (cf: RequestInitCfProperties): Attributes => {
const attrs: Record<string, string | number> = {}
Object.keys(cf).forEach((key) => {
const value = cf[key]
if (typeof value === 'string' || typeof value === 'number') {
attrs[`cf.${key}`] = value
} else {
attrs[`cf.${key}`] = JSON.stringify(value)
}
})
return attrs
}
export function gatherRequestAttributes(request: Request): Attributes {
const attrs: Record<string, string | number> = {}
const headers = request.headers
// attrs[SemanticAttributes.HTTP_CLIENT_IP] = '1.1.1.1'
attrs[SemanticAttributes.HTTP_METHOD] = request.method
attrs[SemanticAttributes.HTTP_URL] = sanitiseURL(request.url)
attrs[SemanticAttributes.HTTP_USER_AGENT] = headers.get('user-agent')!
attrs[SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH] = headers.get('content-length')!
attrs['http.request_content-type'] = headers.get('content-type')!
attrs['http.accepts'] = headers.get('accepts')!
return attrs
}
export function gatherResponseAttributes(response: Response): Attributes {
const attrs: Record<string, string | number> = {}
attrs[SemanticAttributes.HTTP_STATUS_CODE] = response.status
attrs[SemanticAttributes.HTTP_RESPONSE_CONTENT_LENGTH] = response.headers.get('content-length')!
attrs['http.response_content-type'] = response.headers.get('content-type')!
return attrs
}
export function gatherIncomingCfAttributes(request: Request): Attributes {
const attrs: Record<string, string | number> = {}
attrs[SemanticAttributes.HTTP_SCHEME] = request.cf?.httpProtocol as string
attrs['net.colo'] = request.cf?.colo as string
attrs['net.country'] = request.cf?.country as string
attrs['net.request_priority'] = request.cf?.requestPriority as string
attrs['net.tls_cipher'] = request.cf?.tlsCipher as string
attrs['net.tls_version'] = request.cf?.tlsVersion as string
attrs['net.asn'] = request.cf?.asn as number
attrs['net.tcp_rtt'] = request.cf?.clientTcpRtt as number
return attrs
}
export function getParentContextFromHeaders(headers: Headers): Context {
return propagation.extract(api_context.active(), headers, {
get(headers, key) {
return headers.get(key) || undefined
},
keys(headers) {
return [...headers.keys()]
},
})
}
function getParentContextFromRequest(request: Request) {
const workerConfig = getActiveConfig()
const acceptTraceContext =
typeof workerConfig.handlers.fetch.acceptTraceContext === 'function'
? workerConfig.handlers.fetch.acceptTraceContext(request)
: workerConfig.handlers.fetch.acceptTraceContext ?? true
return acceptTraceContext ? getParentContextFromHeaders(request.headers) : api_context.active()
}
export function waitUntilTrace(fn: () => Promise<any>): Promise<void> {
const tracer = trace.getTracer('waitUntil')
return tracer.startActiveSpan('waitUntil', async (span) => {
await fn()
span.end()
})
}
let cold_start = true
export function executeFetchHandler(fetchFn: FetchHandler, [request, env, ctx]: FetchHandlerArgs): Promise<Response> {
const spanContext = getParentContextFromRequest(request)
const tracer = trace.getTracer('fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
[SemanticAttributes.FAAS_EXECUTION]: request.headers.get('cf-ray') ?? undefined,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request, env, ctx)
if (response.status < 500) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
|
export function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {
|
const fetchHandler: ProxyHandler<FetchHandler> = {
apply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {
const [request, orig_env, orig_ctx] = argArray
const config = initialiser(orig_env as Record<string, unknown>, request)
const env = instrumentEnv(orig_env as Record<string, unknown>)
const { ctx, tracker } = proxyExecutionContext(orig_ctx)
const context = setConfig(config)
try {
const args: FetchHandlerArgs = [request, env, ctx]
return await api_context.with(context, executeFetchHandler, undefined, target, args)
} catch (error) {
throw error
} finally {
orig_ctx.waitUntil(exportSpans(tracker))
}
},
}
return wrap(fetchFn, fetchHandler)
}
type getFetchConfig = (config: ResolvedTraceConfig) => FetcherConfig
export function instrumentFetcher(
fetchFn: Fetcher['fetch'],
configFn: getFetchConfig,
attrs?: Attributes
): Fetcher['fetch'] {
const handler: ProxyHandler<typeof fetch> = {
apply: (target, thisArg, argArray): ReturnType<typeof fetch> => {
const workerConfig = getActiveConfig()
const config = configFn(workerConfig)
const request = new Request(argArray[0], argArray[1])
const tracer = trace.getTracer('fetcher')
const options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }
const host = new URL(request.url).host
const spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`
const promise = tracer.startActiveSpan(spanName, options, async (span) => {
const includeTraceContext =
typeof config.includeTraceContext === 'function'
? config.includeTraceContext(request)
: config.includeTraceContext
if (includeTraceContext ?? true) {
propagation.inject(api_context.active(), request.headers, {
set: (h, k, v) => h.set(k, typeof v === 'string' ? v : String(v)),
})
}
span.setAttributes(gatherRequestAttributes(request))
if (request.cf) span.setAttributes(gatherOutgoingCfAttributes(request.cf))
const response: Response = await Reflect.apply(target, thisArg, [request])
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
})
return promise
},
}
return wrap(fetchFn, handler, true)
}
export function instrumentGlobalFetch(): void {
globalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)
}
|
src/instrumentation/fetch.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nfunction instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {",
"score": 0.919965386390686
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request)\n\t\t\tif (response.ok) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response",
"score": 0.872379720211029
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 0.8637076616287231
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\tconst initialiser = createInitialiser(config)\n\tif (handler.fetch) {\n\t\tconst fetcher = unwrap(handler.fetch) as FetchHandler\n\t\thandler.fetch = createFetchHandler(fetcher, initialiser)\n\t}\n\tif (handler.queue) {\n\t\tconst queuer = unwrap(handler.queue) as QueueHandler\n\t\thandler.queue = createQueueHandler(queuer, initialiser)\n\t}\n\treturn handler",
"score": 0.8433366417884827
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t} finally {\n\t\t\t\texportSpans()\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\nfunction instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {\n\tif (!alarmFn) return undefined\n\tconst alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {",
"score": 0.8403922915458679
}
] |
typescript
|
export function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {
|
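// --- Editor's illustrative sketch (not a dataset row): wiring createFetchHandler from
// src/instrumentation/fetch.ts above around a plain module-worker fetch handler.
// The initialiser is a stand-in for what createInitialiser(config) in src/sdk.ts would produce.
import { createFetchHandler } from './instrumentation/fetch.js'
import type { Initialiser } from './config.js'

declare const initialiser: Initialiser // assumed to come from createInitialiser(config)

const fetchHandler: ExportedHandlerFetchHandler = async (_request, _env, _ctx) => new Response('ok')

// The proxy resolves config per request, instruments env bindings, proxies ctx.waitUntil through a
// PromiseTracker, runs the handler inside an active OpenTelemetry context, then schedules exportSpans.
export default { fetch: createFetchHandler(fetchHandler, initialiser) }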
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
				if (typeof result === 'function') {
					// bind() returns a new bound function; return it instead of discarding it
					return result.bind(doObj)
				}
				return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass:
|
DOClass, initialiser: Initialiser): DOClass {
|
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nexport function instrumentDO(doClass: DOClass, config: ConfigurationOption) {\n\tconst initialiser = createInitialiser(config)\n\treturn instrumentDOClass(doClass, initialiser)\n}\nexport { waitUntilTrace } from './instrumentation/fetch.js'",
"score": 0.8461403250694275
},
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 0.7926163673400879
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}\nfunction instrumentCache(cache: Cache, cacheName: string): Cache {\n\tconst handler: ProxyHandler<typeof cache> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'delete' || prop === 'match' || prop === 'put') {",
"score": 0.771175742149353
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.7699263095855713
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\t\t\tspan.setAttribute('hasResult', !!result)\n\t\t\t\tspan.end()\n\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, fnHandler)\n}\nexport function instrumentKV(kv: KVNamespace, name: string): KVNamespace {\n\tconst kvHandler: ProxyHandler<KVNamespace> = {",
"score": 0.7653087377548218
}
] |
typescript
|
DOClass, initialiser: Initialiser): DOClass {
|
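// --- Editor's illustrative sketch (not a dataset row): what instrumentDOBinding from
// src/instrumentation/do.ts above gives a caller. The COUNTER binding name is hypothetical.
import { instrumentDOBinding } from './instrumentation/do.js'

declare const env: { COUNTER: DurableObjectNamespace } // hypothetical worker env

const ns = instrumentDOBinding(env.COUNTER, 'COUNTER')
const stub = ns.get(ns.idFromName('singleton'))
// The stub's fetch is wrapped by instrumentFetcher, so this call emits a CLIENT span named
// "durable_object:COUNTER" with do.namespace / do.id / do.id.name attributes and injects
// W3C trace context headers into the request that reaches the Durable Object.
const res = await stub.fetch('https://do/counter') // top-level await assumes an ES module worker
console.log(res.status)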
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(
|
fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
|
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
				if (typeof result === 'function') {
					// bind() returns a new bound function; return it instead of discarding it
					return result.bind(doObj)
				}
				return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.8962752819061279
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tawait fn()\n\t\tspan.end()\n\t})\n}\nlet cold_start = true\nexport function executeFetchHandler(fetchFn: FetchHandler, [request, env, ctx]: FetchHandlerArgs): Promise<Response> {\n\tconst spanContext = getParentContextFromRequest(request)\n\tconst tracer = trace.getTracer('fetchHandler')\n\tconst attributes = {\n\t\t[SemanticAttributes.FAAS_TRIGGER]: 'http',",
"score": 0.8530499339103699
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 0.8519655466079712
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\t\tthrow error\n\t\t\t} finally {\n\t\t\t\torig_ctx.waitUntil(exportSpans(tracker))\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\ntype getFetchConfig = (config: ResolvedTraceConfig) => FetcherConfig\nexport function instrumentFetcher(",
"score": 0.8514704704284668
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 0.8410173058509827
}
] |
typescript
|
fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
|
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return
|
instrumentStorage(result)
} else if (typeof result === 'function') {
|
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
				if (typeof result === 'function') {
					// bind() returns a new bound function; return it instead of discarding it
					return result.bind(doObj)
				}
				return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 0.9164421558380127
},
{
"filename": "src/instrumentation/env.ts",
"retrieved_chunk": "const isDurableObject = (item?: unknown): item is DurableObjectNamespace => {\n\treturn !!(item as DurableObjectNamespace)?.idFromName\n}\nconst instrumentEnv = (env: Record<string, unknown>): Record<string, unknown> => {\n\tconst envHandler: ProxyHandler<Record<string, unknown>> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst item = Reflect.get(target, prop, receiver)\n\t\t\tif (isKVNamespace(item)) {\n\t\t\t\treturn instrumentKV(item, String(prop))\n\t\t\t} else if (isQueue(item)) {",
"score": 0.867032527923584
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}\nfunction instrumentCache(cache: Cache, cacheName: string): Cache {\n\tconst handler: ProxyHandler<typeof cache> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'delete' || prop === 'match' || prop === 'put') {",
"score": 0.8616809844970703
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\tconst handler: ProxyHandler<typeof caches> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'default') {\n\t\t\t\tconst cache = target.default\n\t\t\t\treturn instrumentCache(cache, 'default')\n\t\t\t} else if (prop === 'open') {\n\t\t\t\tconst openFn = Reflect.get(target, prop).bind(target)\n\t\t\t\treturn instrumentOpen(openFn)\n\t\t\t} else {\n\t\t\t\treturn Reflect.get(target, prop)",
"score": 0.8566259741783142
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentKVFn(fn, name, operation)\n\t\t},\n\t}\n\treturn wrap(kv, kvHandler)\n}",
"score": 0.8481420874595642
}
] |
typescript
|
instrumentStorage(result)
} else if (typeof result === 'function') {
|
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
|
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
|
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 0.903170645236969
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}\nfunction instrumentCache(cache: Cache, cacheName: string): Cache {\n\tconst handler: ProxyHandler<typeof cache> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'delete' || prop === 'match' || prop === 'put') {",
"score": 0.8570482730865479
},
{
"filename": "src/instrumentation/env.ts",
"retrieved_chunk": "const isDurableObject = (item?: unknown): item is DurableObjectNamespace => {\n\treturn !!(item as DurableObjectNamespace)?.idFromName\n}\nconst instrumentEnv = (env: Record<string, unknown>): Record<string, unknown> => {\n\tconst envHandler: ProxyHandler<Record<string, unknown>> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst item = Reflect.get(target, prop, receiver)\n\t\t\tif (isKVNamespace(item)) {\n\t\t\t\treturn instrumentKV(item, String(prop))\n\t\t\t} else if (isQueue(item)) {",
"score": 0.8548110723495483
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\tconst handler: ProxyHandler<typeof caches> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'default') {\n\t\t\t\tconst cache = target.default\n\t\t\t\treturn instrumentCache(cache, 'default')\n\t\t\t} else if (prop === 'open') {\n\t\t\t\tconst openFn = Reflect.get(target, prop).bind(target)\n\t\t\t\treturn instrumentOpen(openFn)\n\t\t\t} else {\n\t\t\t\treturn Reflect.get(target, prop)",
"score": 0.8387748599052429
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentKVFn(fn, name, operation)\n\t\t},\n\t}\n\treturn wrap(kv, kvHandler)\n}",
"score": 0.8382748961448669
}
] |
typescript
|
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
|
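For orientation, a minimal usage sketch of the `instrumentDO` export referenced in the retrieved src/sdk.ts chunk above. This is not part of the record: the import path assumes the package entry re-exports src/sdk.ts, the exporter URL and service name are placeholders, the OTLP exporter config is assumed to accept a `url` field, and service fields beyond `name` are assumed optional.
// Illustrative sketch only: shows how a Durable Object class would be wrapped so that
// instrumentDOClass (above) proxies its constructor, fetch and alarm handlers.
import { instrumentDO, ResolveConfigFn } from './sdk.js'

class TracedCounter implements DurableObject {
	constructor(private state: DurableObjectState, private env: unknown) {}
	async fetch(request: Request): Promise<Response> {
		const count = ((await this.state.storage.get<number>('count')) ?? 0) + 1
		await this.state.storage.put('count', count)
		return new Response(String(count))
	}
}

const config: ResolveConfigFn = (_env, _trigger) => ({
	exporter: { url: 'https://collector.example.com/v1/traces' }, // assumed OTLP config shape
	service: { name: 'traced-counter' },
})

// instrumentDO delegates to instrumentDOClass, so instances are constructed inside an
// OpenTelemetry context and their state/env bindings are proxied for tracing.
export const Counter = instrumentDO(TracedCounter, config)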
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const
|
env = instrumentEnv(orig_env)
const createDO = () => {
|
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nexport function instrumentDO(doClass: DOClass, config: ConfigurationOption) {\n\tconst initialiser = createInitialiser(config)\n\treturn instrumentDOClass(doClass, initialiser)\n}\nexport { waitUntilTrace } from './instrumentation/fetch.js'",
"score": 0.8491382002830505
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nfunction createInitialiser(config: ConfigurationOption): Initialiser {\n\tif (typeof config === 'function') {\n\t\treturn (env, trigger) => {\n\t\t\tconst conf = parseConfig(config(env, trigger))\n\t\t\tinit(conf)\n\t\t\treturn conf\n\t\t}\n\t} else {\n\t\treturn () => {",
"score": 0.8146401047706604
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "import { instrumentGlobalCache } from './instrumentation/cache.js'\nimport { createQueueHandler } from './instrumentation/queue.js'\nimport { DOClass, instrumentDOClass } from './instrumentation/do.js'\ntype FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>\ntype QueueHandler = ExportedHandlerQueueHandler\nexport type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig\nexport type ConfigurationOption = TraceConfig | ResolveConfigFn\nexport function isRequest(trigger: Trigger): trigger is Request {\n\treturn trigger instanceof Request\n}",
"score": 0.8040891885757446
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 0.8032724857330322
},
{
"filename": "src/instrumentation/env.ts",
"retrieved_chunk": "const isDurableObject = (item?: unknown): item is DurableObjectNamespace => {\n\treturn !!(item as DurableObjectNamespace)?.idFromName\n}\nconst instrumentEnv = (env: Record<string, unknown>): Record<string, unknown> => {\n\tconst envHandler: ProxyHandler<Record<string, unknown>> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst item = Reflect.get(target, prop, receiver)\n\t\t\tif (isKVNamespace(item)) {\n\t\t\t\treturn instrumentKV(item, String(prop))\n\t\t\t} else if (isQueue(item)) {",
"score": 0.8019448518753052
}
] |
typescript
|
env = instrumentEnv(orig_env)
const createDO = () => {
|
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
|
const context = setConfig(config)
try {
|
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 0.9055842161178589
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 0.8877949118614197
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.878385066986084
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 0.8641493916511536
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tawait fn()\n\t\tspan.end()\n\t})\n}\nlet cold_start = true\nexport function executeFetchHandler(fetchFn: FetchHandler, [request, env, ctx]: FetchHandlerArgs): Promise<Response> {\n\tconst spanContext = getParentContextFromRequest(request)\n\tconst tracer = trace.getTracer('fetchHandler')\n\tconst attributes = {\n\t\t[SemanticAttributes.FAAS_TRIGGER]: 'http',",
"score": 0.8541744947433472
}
] |
typescript
|
const context = setConfig(config)
try {
|
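The records above lean on `wrap`/`unwrap` from ../wrap.js, which is not included in this dump. The sketch below uses a plain Proxy to illustrate the same get-trap pattern that instrumentState and instrumentStorage apply: intercept the `storage` property, wrap its methods, and bind other functions back to the original target. The Storage/State interfaces and the console logging are simplified stand-ins, not the library's actual types or span logic.
// Plain-Proxy illustration of the get-trap pattern; real span creation is omitted.
interface Storage {
	get(key: string): Promise<unknown>
	put(key: string, value: unknown): Promise<void>
}
interface State {
	id: string
	storage: Storage
}

function instrumentedStorage(storage: Storage): Storage {
	return new Proxy(storage, {
		get(target, prop, receiver) {
			const fn = Reflect.get(target, prop, receiver)
			if (typeof fn !== 'function') return fn
			return (...args: unknown[]) => {
				console.log(`storage.${String(prop)}`) // a span would be started and ended here
				return fn.apply(target, args)
			}
		},
	})
}

function instrumentedState(state: State): State {
	return new Proxy(state, {
		get(target, prop, receiver) {
			const result = Reflect.get(target, prop, receiver)
			if (prop === 'storage') return instrumentedStorage(result as Storage)
			// non-storage functions are bound back to the original state object
			return typeof result === 'function' ? result.bind(target) : result
		},
	})
}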
/* eslint-disable @next/next/no-img-element */
import { DialogueElement } from "@/types/DialogueElement";
import { AvatarIcon } from "../AvatarIcon";
import { EmojiWrap } from "../EmojiWrap";
export const DialogueElementItem: React.FC<{
dialogueElement: DialogueElement;
dialogueIndex: number;
isResponding: boolean;
}> = ({ dialogueElement, dialogueIndex, isResponding }) => {
return (
<div
className={`dialogueElementItem ${
dialogueElement.who === "assistant"
? "dialogueElementItemAssistant"
: "dialogueElementItemHuman"
}`}
key={dialogueIndex}
>
<div
className="avatarIconWrap"
style={{
display: "flex",
justifyItems: "center",
flexShrink: 0,
}}
>
<AvatarIcon who={dialogueElement.who} />
</div>
<div className="dialogueElementWrap">
<div
className="dialogueTextWrap"
style={{
paddingLeft: "5px",
paddingRight: "5px",
flexGrow: 1,
maxWidth: "100%",
}}
>
{dialogueElement.text?.split("\n").map((row, rowIdx) => {
return (
<div
className="dialogueTextRow"
key={`${dialogueIndex}-${rowIdx}`}
style={{
minHeight: "1em",
maxWidth: "100%",
wordBreak: "break-all",
}}
>
{row}
{isResponding &&
rowIdx === dialogueElement.text.split("\n").length - 1 && (
<span className="blinkingCursor" />
)}
</div>
);
})}
{!isResponding &&
dialogueElement.textEnd?.split("\n").map((row, rowIdx) => {
return (
<div
key={`${dialogueIndex}-${rowIdx}-end`}
style={{
minHeight: "1em",
marginLeft: row.startsWith(" ") ? "1em" : "0px",
}}
>
{row}
</div>
);
})}
</div>
{!isResponding && (
<div
className="dialogueEmojiListWrap"
style={{
position: "relative",
top: 0,
display: "flex",
padding: "6px 0",
height: "auto",
}}
>
{dialogueElement.emojiList.map((emojiValue) => {
return (
<
|
EmojiWrap
key={emojiValue.name}
|
emoji={emojiValue.name}
count={emojiValue.count}
/>
);
})}
</div>
)}
</div>
</div>
);
};
|
src/components/DialogueElementItem/index.tsx
|
yuiseki-LUNATIC-0c1872d
|
[
{
"filename": "src/app/page.tsx",
"retrieved_chunk": " <main suppressHydrationWarning className=\"main\">\n <div\n className=\"dialogueListWrap\"\n style={{\n width: \"100%\",\n margin: \"0 auto 5em\",\n }}\n >\n {dialogueList.map((dialogueElement, dialogueIndex) => {\n return (",
"score": 0.8775285482406616
},
{
"filename": "src/components/EmojiWrap/index.tsx",
"retrieved_chunk": "/* eslint-disable @next/next/no-img-element */\nimport { emojiDict } from \"@/const/emojiDict\";\nexport const EmojiWrap: React.FC<{\n emoji: string;\n count?: number;\n}> = ({ emoji, count }) => {\n return (\n <div\n className=\"emojiWrap\"\n style={{",
"score": 0.833989143371582
},
{
"filename": "src/components/EmojiWrap/index.tsx",
"retrieved_chunk": " className=\"emoji\"\n style={{\n fontSize: \"2em\",\n color: \"blue\",\n fontFamily: \"serif\",\n fontWeight: \"bold\",\n }}\n >\n {(() => {\n if (Object.keys(emojiDict).includes(emoji)) {",
"score": 0.8323378562927246
},
{
"filename": "src/app/page.tsx",
"retrieved_chunk": " <DialogueElementItem\n key={dialogueIndex}\n dialogueElement={dialogueElement}\n dialogueIndex={dialogueIndex}\n isResponding={\n (responding || lazyInserting) &&\n dialogueIndex === dialogueList.length - 1\n }\n />\n );",
"score": 0.8277420997619629
},
{
"filename": "src/components/EmojiWrap/index.tsx",
"retrieved_chunk": " cursor: \"pointer\",\n fontFamily: \"sans-serif, emoji\",\n display: \"flex\",\n padding: \"0 8px\",\n justifyContent: \"center\",\n alignItems: \"center\",\n zIndex: 50,\n }}\n >\n <span",
"score": 0.8242154121398926
}
] |
typescript
|
EmojiWrap
key={emojiValue.name}
|
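A parent-component sketch assembled from the retrieved src/app/page.tsx chunks above, showing how DialogueElementItem is fed its props. The import alias for the component directory and the `responding` prop name are assumptions, and the `lazyInserting` flag from the original page is omitted for brevity.
// Sketch of rendering DialogueElementItem from a dialogue list; paths and state names assumed.
import { DialogueElement } from "@/types/DialogueElement";
import { DialogueElementItem } from "@/components/DialogueElementItem";

export const DialogueList: React.FC<{
  dialogueList: DialogueElement[];
  responding: boolean;
}> = ({ dialogueList, responding }) => {
  return (
    <div
      className="dialogueListWrap"
      style={{ width: "100%", margin: "0 auto 5em" }}
    >
      {dialogueList.map((dialogueElement, dialogueIndex) => {
        return (
          <DialogueElementItem
            key={dialogueIndex}
            dialogueElement={dialogueElement}
            dialogueIndex={dialogueIndex}
            // only the last element shows the blinking cursor while a response streams in
            isResponding={responding && dialogueIndex === dialogueList.length - 1}
          />
        );
      })}
    </div>
  );
};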
import { trace, SpanOptions, SpanKind, Attributes, Exception, context as api_context } from '@opentelemetry/api'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans, proxyExecutionContext } from './common.js'
import { instrumentEnv } from './env.js'
import { unwrap, wrap } from '../wrap.js'
type QueueHandler = ExportedHandlerQueueHandler<unknown, unknown>
export type QueueHandlerArgs = Parameters<QueueHandler>
const traceIdSymbol = Symbol('traceId')
class MessageStatusCount {
succeeded = 0
failed = 0
readonly total: number
constructor(total: number) {
this.total = total
}
ack() {
this.succeeded = this.succeeded + 1
}
ackRemaining() {
this.succeeded = this.total - this.failed
}
retry() {
this.failed = this.failed + 1
}
retryRemaining() {
this.failed = this.total - this.succeeded
}
toAttributes(): Attributes {
return {
'queue.messages_count': this.total,
'queue.messages_success': this.succeeded,
'queue.messages_failed': this.failed,
'queue.batch_success': this.succeeded === this.total,
}
}
}
const addEvent = (name: string, msg?: Message) => {
const attrs: Attributes = {}
if (msg) {
attrs['queue.message_id'] = msg.id
attrs['queue.message_timestamp'] = msg.timestamp.toISOString()
}
trace.getActiveSpan()?.addEvent(name, attrs)
}
const proxyQueueMessage = <Q>(msg: Message<Q>, count: MessageStatusCount): Message<Q> => {
const msgHandler: ProxyHandler<Message<Q>> = {
get: (target, prop) => {
if (prop === 'ack') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('messageAck', msg)
count.ack()
//TODO: handle errors
Reflect.apply(fnTarget, msg, [])
},
})
} else if (prop === 'retry') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('messageRetry', msg)
count.retry()
//TODO: handle errors
const result = Reflect.apply(fnTarget, msg, [])
return result
},
})
} else {
return Reflect.get(target, prop, msg)
}
},
}
return wrap(msg, msgHandler)
}
const proxyMessageBatch = <E, Q>(batch: MessageBatch, count: MessageStatusCount) => {
const batchHandler: ProxyHandler<MessageBatch> = {
get: (target, prop) => {
if (prop === 'messages') {
const messages = Reflect.get(target, prop)
const messagesHandler: ProxyHandler<MessageBatch['messages']> = {
get: (target, prop) => {
if (typeof prop === 'string' && !isNaN(parseInt(prop))) {
const message = Reflect.get(target, prop)
return proxyQueueMessage(message, count)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(messages, messagesHandler)
} else if (prop === 'ackAll') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('ackAll')
count.ackRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
} else if (prop === 'retryAll') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('retryAll')
count.retryRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
}
return Reflect.get(target, prop)
},
}
return wrap(batch, batchHandler)
}
export function executeQueueHandler(queueFn: QueueHandler, [batch, env, ctx]: QueueHandlerArgs): Promise<void> {
const count = new MessageStatusCount(batch.messages.length)
batch = proxyMessageBatch(batch, count)
const tracer = trace.getTracer('queueHandler')
const options: SpanOptions = {
attributes: {
'queue.name': batch.queue,
},
kind: SpanKind.CONSUMER,
}
const promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {
const traceId = span.spanContext().traceId
api_context.active().setValue(traceIdSymbol, traceId)
try {
const result = queueFn(batch, env, ctx)
await span.setAttribute('queue.implicitly_acked', count.total - count.succeeded - count.failed)
count.ackRemaining()
span.setAttributes(count.toAttributes())
span.end()
return result
} catch (error) {
span.recordException(error as Exception)
span.setAttribute('queue.implicitly_retried', count.total - count.succeeded - count.failed)
count.retryRemaining()
span.end()
throw error
}
})
return promise
}
|
export function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {
|
const queueHandler: ProxyHandler<QueueHandler> = {
async apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {
const [batch, orig_env, orig_ctx] = argArray
const config = initialiser(orig_env as Record<string, unknown>, batch)
const env = instrumentEnv(orig_env as Record<string, unknown>)
const { ctx, tracker } = proxyExecutionContext(orig_ctx)
const context = setConfig(config)
try {
const args: QueueHandlerArgs = [batch, env, ctx]
return await api_context.with(context, executeQueueHandler, undefined, target, args)
} catch (error) {
throw error
} finally {
orig_ctx.waitUntil(exportSpans(tracker))
}
},
}
return wrap(queueFn, queueHandler)
}
function instrumentQueueSend(fn: Queue<unknown>['send'], name: string): Queue<unknown>['send'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['send']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'send')
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
return wrap(fn, handler)
}
function instrumentQueueSendBatch(fn: Queue<unknown>['sendBatch'], name: string): Queue<unknown>['sendBatch'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['sendBatch']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSendBatch: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'sendBatch')
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
return wrap(fn, handler)
}
export function instrumentQueueSender(queue: Queue<unknown>, name: string) {
const queueHandler: ProxyHandler<Queue<unknown>> = {
get: (target, prop) => {
if (prop === 'send') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSend(sendFn, name)
} else if (prop === 'sendBatch') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSendBatch(sendFn, name)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(queue, queueHandler)
}
|
src/instrumentation/queue.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.8771988153457642
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nfunction instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {",
"score": 0.8400025963783264
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\tconst initialiser = createInitialiser(config)\n\tif (handler.fetch) {\n\t\tconst fetcher = unwrap(handler.fetch) as FetchHandler\n\t\thandler.fetch = createFetchHandler(fetcher, initialiser)\n\t}\n\tif (handler.queue) {\n\t\tconst queuer = unwrap(handler.queue) as QueueHandler\n\t\thandler.queue = createQueueHandler(queuer, initialiser)\n\t}\n\treturn handler",
"score": 0.8319547772407532
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t} finally {\n\t\t\t\texportSpans()\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\nfunction instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {\n\tif (!alarmFn) return undefined\n\tconst alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {",
"score": 0.8119767904281616
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 0.8003801703453064
}
] |
typescript
|
export function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {
|
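A consumer-side sketch showing how createQueueHandler is reached in practice: `instrument()` from src/sdk.ts (reproduced later in this dump) replaces handler.queue with the instrumented version. The exporter config shape, endpoint, service name and message body type are placeholders for illustration.
// Sketch of a queue consumer wired through instrument(); not part of the record above.
import { instrument, ResolveConfigFn } from './sdk.js'

const handler: ExportedHandler<unknown, string> = {
	async queue(batch, _env, _ctx) {
		for (const message of batch.messages) {
			// each ack is recorded as a 'messageAck' event by proxyQueueMessage above
			message.ack()
		}
	},
}

const config: ResolveConfigFn = (_env, _trigger) => ({
	exporter: { url: 'https://collector.example.com/v1/traces' }, // assumed OTLP config shape
	service: { name: 'queue-consumer' },
})

export default instrument(handler, config)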
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
|
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
|
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 0.875576376914978
},
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 0.8687787055969238
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 0.8496503233909607
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "import { context, trace, Tracer, TracerOptions, TracerProvider } from '@opentelemetry/api'\nimport { SpanProcessor } from '@opentelemetry/sdk-trace-base'\nimport { Resource } from '@opentelemetry/resources'\nimport { AsyncLocalStorageContextManager } from './context.js'\nimport { WorkerTracer } from './tracer.js'\n/**\n * Register this TracerProvider for use with the OpenTelemetry API.\n * Undefined values may be replaced with defaults, and\n * null values will be skipped.\n *",
"score": 0.8429477214813232
},
{
"filename": "src/instrumentation/common.ts",
"retrieved_chunk": "export async function exportSpans(tracker?: PromiseTracker) {\n\tconst tracer = trace.getTracer('export')\n\tif (tracer instanceof WorkerTracer) {\n\t\tawait scheduler.wait(1)\n\t\tif (tracker) {\n\t\t\tawait tracker.wait()\n\t\t}\n\t\tawait tracer.spanProcessor.forceFlush()\n\t} else {\n\t\tconsole.error('The global tracer is not of type WorkerTracer and can not export spans')",
"score": 0.8384515643119812
}
] |
typescript
|
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
|
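A configuration sketch that exercises the head-sampling branch of parseConfig above: passing a ParentRatioSamplingConfig (ratio/acceptRemote) rather than a ready-made Sampler, so createSampler builds the ParentBasedSampler. The exporter shape, ratio and names are placeholders; field names follow createSampler and parseConfig in this record.
// Sketch of a TraceConfig using the ratio-based head sampler handled by createSampler.
import { TraceConfig } from './types.js'

const sampledConfig: TraceConfig = {
	exporter: { url: 'https://collector.example.com/v1/traces' }, // assumed OTLP config shape
	service: { name: 'sampled-worker', version: '1.0.0' },
	sampling: {
		// sample roughly 10% of locally started traces; because acceptRemote is false,
		// remote parent decisions also fall back to the ratio sampler (see createSampler)
		headSampler: { ratio: 0.1, acceptRemote: false },
	},
}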
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
|
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
|
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(doObj, objHandler)\n}\nexport function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {\n\tconst classHandler: ProxyHandler<DOClass> = {\n\t\tconstruct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {\n\t\t\tconst trigger: DOConstructorTrigger = {\n\t\t\t\tid: orig_state.id.toString(),",
"score": 0.8934004902839661
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(alarmFn, alarmHandler)\n}\nfunction instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {\n\tconst objHandler: ProxyHandler<DurableObject> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'fetch') {\n\t\t\t\tconst fetchFn = Reflect.get(target, prop)",
"score": 0.8546265363693237
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\treturn instrumentDurableObject(doObj, initialiser, env, state)\n\t\t},\n\t}\n\treturn wrap(doClass, classHandler)\n}",
"score": 0.8413252830505371
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "import { instrumentEnv } from './env.js'\nimport { Initialiser, setConfig } from '../config.js'\nimport { exportSpans } from './common.js'\nimport { instrumentStorage } from './do-storage.js'\nimport { DOConstructorTrigger } from '../types.js'\ntype FetchFn = DurableObject['fetch']\ntype AlarmFn = DurableObject['alarm']\ntype Env = Record<string, unknown>\nfunction instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {\n\tconst stubHandler: ProxyHandler<typeof stub> = {",
"score": 0.8377835750579834
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t\treturn instrumentFetchFn(fetchFn, initialiser, env, state.id)\n\t\t\t} else if (prop === 'alarm') {\n\t\t\t\tconst alarmFn = Reflect.get(target, prop)\n\t\t\t\treturn instrumentAlarmFn(alarmFn, initialiser, env, state.id)\n\t\t\t} else {\n\t\t\t\tconst result = Reflect.get(target, prop)\n\t\t\t\tif (typeof result === 'function') {\n\t\t\t\t\tresult.bind(doObj)\n\t\t\t\t}\n\t\t\t\treturn result",
"score": 0.8332451581954956
}
] |
typescript
|
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
|
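A sketch of the other exporter branch in parseConfig: because ConsoleSpanExporter exposes an export method, isSpanExporter() is true and the instance is used directly instead of being wrapped in OTLPExporter. The handler body, service name and the 'healthcheck' span name are placeholders; the postProcessor simply filters spans by name, matching the PostProcessorFn signature from the retrieved types.ts chunk.
// Sketch of passing a SpanExporter instance straight through parseConfig, plus a postProcessor.
import { ConsoleSpanExporter, ReadableSpan } from '@opentelemetry/sdk-trace-base'
import { instrument } from './sdk.js'

export default instrument(
	{
		async fetch(): Promise<Response> {
			return new Response('ok')
		},
	},
	{
		exporter: new ConsoleSpanExporter(), // has .export, so isSpanExporter() keeps it as-is
		service: { name: 'debug-worker' },
		postProcessor: (spans: ReadableSpan[]) => spans.filter((span) => span.name !== 'healthcheck'),
	}
)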
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
|
function createInitialiser(config: ConfigurationOption): Initialiser {
|
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8145056366920471
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8071998357772827
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "type StartExportArguments = {\n\texporter: SpanExporter\n\ttailSampler: TailSampleFn\n\tpostProcessor: PostProcessorFn\n}\ntype StartSpanAction = Action<'startSpan', { span: Span }>\ntype EndSpanAction = Action<'endSpan', { span: ReadableSpan }>\ntype StartExportAction = Action<'startExport', { args: StartExportArguments }>\nfunction newTrace(currentState: InitialState, { span }: StartSpanAction): InProgressTraceState {\n\tconst spanId = span.spanContext().spanId",
"score": 0.7914695739746094
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tif (options.root) {\n\t\t\tcontext = trace.deleteSpan(context)\n\t\t}\n\t\tconst parentSpan = trace.getSpan(context)\n\t\tconst parentSpanContext = parentSpan?.spanContext()\n\t\tconst hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)\n\t\tconst traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()\n\t\tconst spanKind = options.kind || SpanKind.INTERNAL\n\t\tconst sanitisedAttrs = sanitizeAttributes(options.attributes)\n\t\tconst sampler = getActiveConfig().sampling.headSampler",
"score": 0.787299394607544
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\tname: string\n\tnamespace?: string\n\tversion?: string\n}\nexport interface ParentRatioSamplingConfig {\n\tacceptRemote?: boolean\n\tratio: number\n}\ntype HeadSamplerConf = Sampler | ParentRatioSamplingConfig\nexport interface SamplingConfig<HS extends HeadSamplerConf = HeadSamplerConf> {",
"score": 0.7872796058654785
}
] |
typescript
|
function createInitialiser(config: ConfigurationOption): Initialiser {
|
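A minimal sketch of how the exported `instrument` entrypoint from the src/sdk.ts row above is typically wired into a Worker. The import path './sdk.js', the `OTLP_URL` binding, and the `{ url }` exporter shape are assumptions for illustration, not part of this record.
// Hypothetical Worker entrypoint; import path, Env binding name, and exporter shape are assumptions.
import { instrument, type ResolveConfigFn } from './sdk.js'

interface Env {
	OTLP_URL: string
}

const handler: ExportedHandler<Env> = {
	async fetch(_request, _env, _ctx) {
		return new Response('ok')
	},
}

// Resolving the config per invocation lets secrets come from `env` at runtime.
const config: ResolveConfigFn = (env: Env) => ({
	exporter: { url: env.OTLP_URL },
	service: { name: 'example-worker' },
})

export default instrument(handler, config)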
import { trace, SpanOptions, SpanKind, Attributes, Exception, context as api_context } from '@opentelemetry/api'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans, proxyExecutionContext } from './common.js'
import { instrumentEnv } from './env.js'
import { unwrap, wrap } from '../wrap.js'
type QueueHandler = ExportedHandlerQueueHandler<unknown, unknown>
export type QueueHandlerArgs = Parameters<QueueHandler>
const traceIdSymbol = Symbol('traceId')
class MessageStatusCount {
succeeded = 0
failed = 0
readonly total: number
constructor(total: number) {
this.total = total
}
ack() {
this.succeeded = this.succeeded + 1
}
ackRemaining() {
this.succeeded = this.total - this.failed
}
retry() {
this.failed = this.failed + 1
}
retryRemaining() {
this.failed = this.total - this.succeeded
}
toAttributes(): Attributes {
return {
'queue.messages_count': this.total,
'queue.messages_success': this.succeeded,
'queue.messages_failed': this.failed,
'queue.batch_success': this.succeeded === this.total,
}
}
}
const addEvent = (name: string, msg?: Message) => {
const attrs: Attributes = {}
if (msg) {
attrs['queue.message_id'] = msg.id
attrs['queue.message_timestamp'] = msg.timestamp.toISOString()
}
trace.getActiveSpan()?.addEvent(name, attrs)
}
const proxyQueueMessage = <Q>(msg: Message<Q>, count: MessageStatusCount): Message<Q> => {
const msgHandler: ProxyHandler<Message<Q>> = {
get: (target, prop) => {
if (prop === 'ack') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('messageAck', msg)
count.ack()
//TODO: handle errors
Reflect.apply(fnTarget, msg, [])
},
})
} else if (prop === 'retry') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('messageRetry', msg)
count.retry()
//TODO: handle errors
const result = Reflect.apply(fnTarget, msg, [])
return result
},
})
} else {
return Reflect.get(target, prop, msg)
}
},
}
return wrap(msg, msgHandler)
}
const proxyMessageBatch = <E, Q>(batch: MessageBatch, count: MessageStatusCount) => {
const batchHandler: ProxyHandler<MessageBatch> = {
get: (target, prop) => {
if (prop === 'messages') {
const messages = Reflect.get(target, prop)
const messagesHandler: ProxyHandler<MessageBatch['messages']> = {
get: (target, prop) => {
if (typeof prop === 'string' && !isNaN(parseInt(prop))) {
const message = Reflect.get(target, prop)
return proxyQueueMessage(message, count)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(messages, messagesHandler)
} else if (prop === 'ackAll') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('ackAll')
count.ackRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
} else if (prop === 'retryAll') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('retryAll')
count.retryRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
}
return Reflect.get(target, prop)
},
}
return wrap(batch, batchHandler)
}
export function executeQueueHandler(queueFn: QueueHandler, [batch, env, ctx]: QueueHandlerArgs): Promise<void> {
const count = new MessageStatusCount(batch.messages.length)
batch = proxyMessageBatch(batch, count)
const tracer = trace.getTracer('queueHandler')
const options: SpanOptions = {
attributes: {
'queue.name': batch.queue,
},
kind: SpanKind.CONSUMER,
}
const promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {
const traceId = span.spanContext().traceId
api_context.active().setValue(traceIdSymbol, traceId)
try {
const result = await queueFn(batch, env, ctx)
// setAttribute is synchronous; await the handler instead so the counts below reflect processed messages
span.setAttribute('queue.implicitly_acked', count.total - count.succeeded - count.failed)
count.ackRemaining()
span.setAttributes(count.toAttributes())
span.end()
return result
} catch (error) {
span.recordException(error as Exception)
span.setAttribute('queue.implicitly_retried', count.total - count.succeeded - count.failed)
count.retryRemaining()
span.end()
throw error
}
})
return promise
}
export function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {
const queueHandler: ProxyHandler<QueueHandler> = {
async apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {
const [batch, orig_env, orig_ctx] = argArray
const config = initialiser(orig_env as Record<string, unknown>, batch)
const env = instrumentEnv(orig_env as Record<string, unknown>)
const { ctx, tracker } = proxyExecutionContext(orig_ctx)
const context = setConfig(config)
try {
const args: QueueHandlerArgs = [batch, env, ctx]
return await api_context.with(context, executeQueueHandler, undefined, target, args)
} catch (error) {
throw error
} finally {
orig_ctx.waitUntil(exportSpans(tracker))
}
},
}
return wrap(queueFn, queueHandler)
}
function instrumentQueueSend(fn: Queue<unknown>['send'], name: string): Queue<unknown>['send'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['send']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'send')
|
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
|
return wrap(fn, handler)
}
function instrumentQueueSendBatch(fn: Queue<unknown>['sendBatch'], name: string): Queue<unknown>['sendBatch'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['sendBatch']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSendBatch: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'sendBatch')
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
return wrap(fn, handler)
}
export function instrumentQueueSender(queue: Queue<unknown>, name: string) {
const queueHandler: ProxyHandler<Queue<unknown>> = {
get: (target, prop) => {
if (prop === 'send') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSend(sendFn, name)
} else if (prop === 'sendBatch') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSendBatch(sendFn, name)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(queue, queueHandler)
}
|
src/instrumentation/queue.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\t\t\tattributes: {\n\t\t\t\t\tbinding_type: 'KV',\n\t\t\t\t\tkv_namespace: name,\n\t\t\t\t\toperation,\n\t\t\t\t},\n\t\t\t}\n\t\t\treturn tracer.startActiveSpan(`kv:${name}:${operation}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tconst extraAttrs = KVAttributes[operation] ? KVAttributes[operation](argArray, result) : {}\n\t\t\t\tspan.setAttributes(extraAttrs)",
"score": 0.8344078063964844
},
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\t\t\treturn tracer.startActiveSpan(`do:storage:${operation}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tconst extraAttrs = StorageAttributes[operation] ? StorageAttributes[operation](argArray, result) : {}\n\t\t\t\tspan.setAttributes(extraAttrs)\n\t\t\t\tspan.setAttribute('hasResult', !!result)\n\t\t\t\tspan.end()\n\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}",
"score": 0.8309239149093628
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\t'http.url': argArray[0].url ? sanitiseURL(argArray[0].url) : undefined,\n\t\t\t\t'cache.operation': op,\n\t\t\t}\n\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes }\n\t\t\treturn tracer.startActiveSpan(`cache:${cacheName}:${op}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tif (op === 'match') {\n\t\t\t\t\tspan.setAttribute('cache.hit', !result)\n\t\t\t\t}\n\t\t\t\tspan.end()",
"score": 0.8285002708435059
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 0.8271347284317017
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "import { SpanKind, SpanOptions, trace } from '@opentelemetry/api'\nimport { wrap } from '../wrap.js'\nimport { sanitiseURL } from './fetch.js'\ntype CacheFns = Cache[keyof Cache]\nconst tracer = trace.getTracer('cache instrumentation')\nfunction instrumentFunction<T extends CacheFns>(fn: T, cacheName: string, op: string): T {\n\tconst handler: ProxyHandler<typeof fn> = {\n\t\tasync apply(target, thisArg, argArray) {\n\t\t\tconst attributes = {\n\t\t\t\t'cache.name': cacheName,",
"score": 0.8241417407989502
}
] |
typescript
|
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
|
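The queue-sender instrumentation in this row repeatedly applies the same Proxy `apply` pattern (start a span, run the wrapped call, end the span). A standalone sketch of that pattern, assuming any @opentelemetry/api-compatible tracer provider is registered:
// Minimal sketch (not part of the source) of the Proxy `apply` wrapping pattern.
import { trace } from '@opentelemetry/api'

function withSpan<T extends (...args: unknown[]) => Promise<unknown>>(fn: T, name: string): T {
	const tracer = trace.getTracer('example')
	const handler: ProxyHandler<T> = {
		apply: (target, thisArg, argArray) =>
			tracer.startActiveSpan(name, async (span) => {
				try {
					// forward the original call unchanged
					return await Reflect.apply(target, thisArg, argArray)
				} finally {
					span.end()
				}
			}),
	}
	return new Proxy(fn, handler)
}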
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
|
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
|
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8380247354507446
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tconst samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])\n\t\tconst { decision, traceState, attributes: attrs } = samplingDecision\n\t\tconst attributes = Object.assign({}, sanitisedAttrs, attrs)\n\t\tconst spanId = this.idGenerator.generateSpanId()\n\t\tconst parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined\n\t\tconst traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE\n\t\tconst spanContext = { traceId, spanId, traceFlags, traceState }\n\t\tconst span = new SpanImpl({\n\t\t\tattributes,\n\t\t\tname,",
"score": 0.8344570994377136
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tif (options.root) {\n\t\t\tcontext = trace.deleteSpan(context)\n\t\t}\n\t\tconst parentSpan = trace.getSpan(context)\n\t\tconst parentSpanContext = parentSpan?.spanContext()\n\t\tconst hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)\n\t\tconst traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()\n\t\tconst spanKind = options.kind || SpanKind.INTERNAL\n\t\tconst sanitisedAttrs = sanitizeAttributes(options.attributes)\n\t\tconst sampler = getActiveConfig().sampling.headSampler",
"score": 0.8306845426559448
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8245791792869568
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 0.8119543194770813
}
] |
typescript
|
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return
|
(env, trigger) => {
|
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/config.ts",
"retrieved_chunk": "import { context } from '@opentelemetry/api'\nimport { ResolvedTraceConfig, Trigger } from './types.js'\nconst configSymbol = Symbol('Otel Workers Tracing Configuration')\nexport type Initialiser = (env: Record<string, unknown>, trigger: Trigger) => ResolvedTraceConfig\nexport function setConfig(config: ResolvedTraceConfig, ctx = context.active()) {\n\treturn ctx.setValue(configSymbol, config)\n}\nexport function getActiveConfig(): ResolvedTraceConfig {\n\tconst config = context.active().getValue(configSymbol) as ResolvedTraceConfig\n\treturn config",
"score": 0.8030052781105042
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.7990358471870422
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t} finally {\n\t\t\t\texportSpans()\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\nfunction instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {\n\tif (!alarmFn) return undefined\n\tconst alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {",
"score": 0.7967430353164673
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nfunction instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {",
"score": 0.7938843965530396
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 0.7922747135162354
}
] |
typescript
|
(env, trigger) => {
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
|
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
|
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 0.8483787775039673
},
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 0.8423652648925781
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 0.8340478539466858
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "} from '@opentelemetry/api'\nimport { SemanticAttributes } from '@opentelemetry/semantic-conventions'\nimport { Initialiser, getActiveConfig, setConfig } from '../config.js'\nimport { wrap } from '../wrap.js'\nimport { instrumentEnv } from './env.js'\nimport { exportSpans, proxyExecutionContext } from './common.js'\nimport { ResolvedTraceConfig } from '../types.js'\nexport type IncludeTraceContextFn = (request: Request) => boolean\nexport interface FetcherConfig {\n\tincludeTraceContext?: boolean | IncludeTraceContextFn",
"score": 0.8199702501296997
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "import { context, trace, Tracer, TracerOptions, TracerProvider } from '@opentelemetry/api'\nimport { SpanProcessor } from '@opentelemetry/sdk-trace-base'\nimport { Resource } from '@opentelemetry/resources'\nimport { AsyncLocalStorageContextManager } from './context.js'\nimport { WorkerTracer } from './tracer.js'\n/**\n * Register this TracerProvider for use with the OpenTelemetry API.\n * Undefined values may be replaced with defaults, and\n * null values will be skipped.\n *",
"score": 0.8174639344215393
}
] |
typescript
|
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
|
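parseConfig in this row accepts either a ready-made Sampler or a ParentRatioSamplingConfig for head sampling. A hedged sketch of the latter, again assuming the './sdk.js' import path and an `OTLP_URL` binding:
// Hypothetical config using a ratio-based head sampler; import path and env binding are assumptions.
import { instrument, type ResolveConfigFn } from './sdk.js'

const config: ResolveConfigFn = (env) => ({
	exporter: { url: env.OTLP_URL as string },
	service: { name: 'sampled-worker' },
	sampling: {
		// Sample roughly 25% of new traces and ignore remote parent sampling decisions.
		headSampler: { ratio: 0.25, acceptRemote: false },
	},
})

export default instrument({ async fetch() { return new Response('ok') } }, config)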
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
// return the bound function so `this` inside the method refers to the original object
return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
|
const env = instrumentEnv(orig_env)
const createDO = () => {
|
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nexport function instrumentDO(doClass: DOClass, config: ConfigurationOption) {\n\tconst initialiser = createInitialiser(config)\n\treturn instrumentDOClass(doClass, initialiser)\n}\nexport { waitUntilTrace } from './instrumentation/fetch.js'",
"score": 0.8482372760772705
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 0.8011594414710999
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nfunction createInitialiser(config: ConfigurationOption): Initialiser {\n\tif (typeof config === 'function') {\n\t\treturn (env, trigger) => {\n\t\t\tconst conf = parseConfig(config(env, trigger))\n\t\t\tinit(conf)\n\t\t\treturn conf\n\t\t}\n\t} else {\n\t\treturn () => {",
"score": 0.798170804977417
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 0.7954208254814148
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "import { instrumentGlobalCache } from './instrumentation/cache.js'\nimport { createQueueHandler } from './instrumentation/queue.js'\nimport { DOClass, instrumentDOClass } from './instrumentation/do.js'\ntype FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>\ntype QueueHandler = ExportedHandlerQueueHandler\nexport type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig\nexport type ConfigurationOption = TraceConfig | ResolveConfigFn\nexport function isRequest(trigger: Trigger): trigger is Request {\n\treturn trigger instanceof Request\n}",
"score": 0.7947711944580078
}
] |
typescript
|
const env = instrumentEnv(orig_env)
const createDO = () => {
|
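instrumentDOClass in this row proxies the constructor plus the fetch/alarm methods. A sketch of wiring a Durable Object through the exported instrumentDO helper; the class, import path, binding name, and exporter shape are assumptions:
// Hypothetical Durable Object wrapped with instrumentDO.
import { instrumentDO, type ResolveConfigFn } from './sdk.js'

class Counter implements DurableObject {
	constructor(private state: DurableObjectState, private env: unknown) {}
	async fetch(_request: Request): Promise<Response> {
		const current = ((await this.state.storage.get<number>('value')) ?? 0) + 1
		await this.state.storage.put('value', current)
		return new Response(String(current))
	}
}

const config: ResolveConfigFn = (env) => ({
	exporter: { url: env.OTLP_URL as string },
	service: { name: 'counter-do' },
})

// The proxied class is what the Durable Object binding should point at.
export const InstrumentedCounter = instrumentDO(Counter, config)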
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function
|
isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
|
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8327980041503906
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 0.8290787935256958
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tthis._spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}\n\tget spanProcessor() {\n\t\treturn this._spanProcessor\n\t}\n\taddToResource(extra: Resource) {\n\t\tthis.resource.merge(extra)\n\t}\n\tstartSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {",
"score": 0.8195885419845581
},
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 0.8187354207038879
},
{
"filename": "src/span.ts",
"retrieved_chunk": "} from '@opentelemetry/core'\nimport { IResource } from '@opentelemetry/resources'\nimport { ReadableSpan, TimedEvent } from '@opentelemetry/sdk-trace-base'\nimport { SemanticAttributes } from '@opentelemetry/semantic-conventions'\ntype OnSpanEnd = (span: Span) => void\ninterface SpanInit {\n\tattributes: unknown\n\tname: string\n\tonEnd: OnSpanEnd\n\tresource: IResource",
"score": 0.8143826127052307
}
] |
typescript
|
isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
|
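Because isSpanExporter duck-types on an `export` method, TraceConfig.exporter can also be a ready SpanExporter instance instead of an OTLP config object. A sketch using the OTel SDK's ConsoleSpanExporter (import path for the SDK module is assumed):
// Hypothetical debug setup passing a SpanExporter instance directly.
import { ConsoleSpanExporter } from '@opentelemetry/sdk-trace-base'
import { instrument } from './sdk.js'

export default instrument(
	{ async fetch() { return new Response('ok') } },
	{
		exporter: new ConsoleSpanExporter(),
		service: { name: 'debug-worker' },
	}
)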
import { Context, Span } from '@opentelemetry/api'
import { ReadableSpan, SpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { Action, State, stateMachine } from 'ts-checked-fsm'
import { getActiveConfig } from './config.js'
import { TailSampleFn } from './sampling.js'
import { PostProcessorFn } from './types.js'
type CompletedTrace = {
traceId: string
localRootSpan: ReadableSpan
completedSpans: ReadableSpan[]
}
type InProgressTrace = {
inProgressSpanIds: Set<string>
} & CompletedTrace
type InitialState = State<'not_started'>
type InProgressTraceState = State<'in_progress', InProgressTrace>
type TraceCompleteState = State<'trace_complete', CompletedTrace>
type ExportingState = State<'exporting', { promise: Promise<ExportResult> }>
type DoneState = State<'done'>
type StartExportArguments = {
exporter: SpanExporter
tailSampler: TailSampleFn
postProcessor: PostProcessorFn
}
type StartSpanAction = Action<'startSpan', { span: Span }>
type EndSpanAction = Action<'endSpan', { span: ReadableSpan }>
type StartExportAction = Action<'startExport', { args: StartExportArguments }>
function newTrace(currentState: InitialState, { span }: StartSpanAction): InProgressTraceState {
const spanId = span.spanContext().spanId
return {
...currentState,
stateName: 'in_progress',
traceId: span.spanContext().traceId,
localRootSpan: span as unknown as ReadableSpan,
completedSpans: [] as ReadableSpan[],
inProgressSpanIds: new Set([spanId]),
} as const
}
function newSpan(currentState: InProgressTraceState, { span }: StartSpanAction): InProgressTraceState {
const spanId = span.spanContext().spanId
currentState.inProgressSpanIds.add(spanId)
return { ...currentState }
}
function endSpan(
currentState: InProgressTraceState,
{ span }: EndSpanAction
): InProgressTraceState | TraceCompleteState {
currentState.completedSpans.push(span)
currentState.inProgressSpanIds.delete(span.spanContext().spanId)
if (currentState.inProgressSpanIds.size === 0) {
return {
stateName: 'trace_complete',
traceId: currentState.traceId,
localRootSpan: currentState.localRootSpan,
completedSpans: currentState.completedSpans,
} as const
} else {
return { ...currentState }
}
}
function startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {
const { exporter, tailSampler, postProcessor } = args
const { traceId, localRootSpan, completedSpans: spans } = currentState
const shouldExport = tailSampler({ traceId, localRootSpan, spans })
if (shouldExport) {
const exportSpans = postProcessor(spans)
const promise = new Promise<ExportResult>((resolve) => {
exporter.export(exportSpans, resolve)
})
return { stateName: 'exporting', promise }
} else {
return { stateName: 'done' }
}
}
const { nextState } = stateMachine()
.state('not_started')
.state<'in_progress', InProgressTraceState>('in_progress')
.state<'trace_complete', TraceCompleteState>('trace_complete')
.state<'exporting', ExportingState>('exporting')
.state('done')
.transition('not_started', 'in_progress')
.transition('in_progress', 'in_progress')
.transition('in_progress', 'trace_complete')
.transition('trace_complete', 'exporting')
.transition('trace_complete', 'done')
.transition('exporting', 'done')
.action<'startSpan', StartSpanAction>('startSpan')
.action<'endSpan', EndSpanAction>('endSpan')
.action<'startExport', StartExportAction>('startExport')
.action('exportDone')
.actionHandler('not_started', 'startSpan', newTrace)
.actionHandler('in_progress', 'startSpan', newSpan)
.actionHandler('in_progress', 'endSpan', endSpan)
.actionHandler('trace_complete', 'startExport', startExport)
.actionHandler('exporting', 'exportDone', (_c, _a) => {
return { stateName: 'done' } as const
})
.done()
type AnyTraceState = Parameters<typeof nextState>[0]
type AnyTraceAction = Parameters<typeof nextState>[1]
export class BatchTraceSpanProcessor implements SpanProcessor {
private traces: Map<string, AnyTraceState> = new Map()
private inprogressExports: Map<string, Promise<ExportResult>> = new Map()
private action(traceId: string, action: AnyTraceAction): AnyTraceState {
const state = this.traces.get(traceId) || { stateName: 'not_started' }
const newState = nextState(state, action)
if (newState.stateName === 'done') {
this.traces.delete(traceId)
} else {
this.traces.set(traceId, newState)
}
return newState
}
private export(traceId: string) {
const { exporter
|
, sampling, postProcessor } = getActiveConfig()
const exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }
|
const newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })
if (newState.stateName === 'exporting') {
const promise = newState.promise
this.inprogressExports.set(traceId, promise)
promise.then((result) => {
if (result.code === ExportResultCode.FAILED) {
console.log('Error sending spans to exporter:', result.error)
}
this.action(traceId, { actionName: 'exportDone' })
this.inprogressExports.delete(traceId)
})
}
}
onStart(span: Span, _parentContext: Context): void {
const traceId = span.spanContext().traceId
this.action(traceId, { actionName: 'startSpan', span })
}
onEnd(span: ReadableSpan): void {
const traceId = span.spanContext().traceId
const state = this.action(traceId, { actionName: 'endSpan', span })
if (state.stateName === 'trace_complete') {
this.export(traceId)
}
}
async forceFlush(): Promise<void> {
await Promise.allSettled(this.inprogressExports.values())
}
async shutdown(): Promise<void> {}
}
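The exporter, tail sampler, and post-processor above are read from the active configuration at export time rather than being baked into the processor. A minimal sketch of a custom tail sampler and post-processor that such a configuration could supply; the one-second threshold and the attribute key are illustrative values, not anything defined in this repository:

import { SpanStatusCode } from '@opentelemetry/api'
import { ReadableSpan } from '@opentelemetry/sdk-trace-base'
import { TailSampleFn } from './sampling.js'
import { PostProcessorFn } from './types.js'

// Keep a trace when its local root span errored or ran for at least one second (illustrative rule).
const slowOrErrored: TailSampleFn = (traceInfo) => {
	const root = traceInfo.localRootSpan as unknown as ReadableSpan
	const [seconds] = root.duration
	return seconds >= 1 || root.status.code === SpanStatusCode.ERROR
}

// Strip a sensitive attribute from every span before it is handed to the exporter.
const scrubAuthHeader: PostProcessorFn = (spans: ReadableSpan[]) =>
	spans.map((span) => {
		delete span.attributes['http.request.header.authorization']
		return span
	})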
|
src/spanprocessor.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8059735298156738
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tif (options.root) {\n\t\t\tcontext = trace.deleteSpan(context)\n\t\t}\n\t\tconst parentSpan = trace.getSpan(context)\n\t\tconst parentSpanContext = parentSpan?.spanContext()\n\t\tconst hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)\n\t\tconst traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()\n\t\tconst spanKind = options.kind || SpanKind.INTERNAL\n\t\tconst sanitisedAttrs = sanitizeAttributes(options.attributes)\n\t\tconst sampler = getActiveConfig().sampling.headSampler",
"score": 0.8026295900344849
},
{
"filename": "src/instrumentation/common.ts",
"retrieved_chunk": "export async function exportSpans(tracker?: PromiseTracker) {\n\tconst tracer = trace.getTracer('export')\n\tif (tracer instanceof WorkerTracer) {\n\t\tawait scheduler.wait(1)\n\t\tif (tracker) {\n\t\t\tawait tracker.wait()\n\t\t}\n\t\tawait tracer.spanProcessor.forceFlush()\n\t} else {\n\t\tconsole.error('The global tracer is not of type WorkerTracer and can not export spans')",
"score": 0.7982977032661438
},
{
"filename": "src/config.ts",
"retrieved_chunk": "import { context } from '@opentelemetry/api'\nimport { ResolvedTraceConfig, Trigger } from './types.js'\nconst configSymbol = Symbol('Otel Workers Tracing Configuration')\nexport type Initialiser = (env: Record<string, unknown>, trigger: Trigger) => ResolvedTraceConfig\nexport function setConfig(config: ResolvedTraceConfig, ctx = context.active()) {\n\treturn ctx.setValue(configSymbol, config)\n}\nexport function getActiveConfig(): ResolvedTraceConfig {\n\tconst config = context.active().getValue(configSymbol) as ResolvedTraceConfig\n\treturn config",
"score": 0.796678900718689
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\tTraceIdRatioBasedSampler,\n} from '@opentelemetry/sdk-trace-base'\nimport { Initialiser } from './config.js'\nimport { OTLPExporter } from './exporter.js'\nimport { WorkerTracerProvider } from './provider.js'\nimport { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'\nimport { BatchTraceSpanProcessor } from './spanprocessor.js'\nimport { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'\nimport { unwrap } from './wrap.js'\nimport { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'",
"score": 0.7920678853988647
}
] |
typescript
|
, sampling, postProcessor } = getActiveConfig()
const exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }
|
import {
Tracer,
TraceFlags,
Span,
SpanKind,
SpanOptions,
Context,
context as api_context,
trace,
} from '@opentelemetry/api'
import { sanitizeAttributes } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'
import { SpanImpl } from './span.js'
import { getActiveConfig } from './config.js'
export class WorkerTracer implements Tracer {
private readonly _spanProcessor: SpanProcessor
private readonly resource: Resource
private readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()
constructor(spanProcessor: SpanProcessor, resource: Resource) {
this._spanProcessor = spanProcessor
this.resource = resource
}
get spanProcessor() {
return this._spanProcessor
}
addToResource(extra: Resource) {
this.resource.merge(extra)
}
startSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {
if (options.root) {
context = trace.deleteSpan(context)
}
const parentSpan = trace.getSpan(context)
const parentSpanContext = parentSpan?.spanContext()
const hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)
const traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()
const spanKind = options.kind || SpanKind.INTERNAL
const sanitisedAttrs = sanitizeAttributes(options.attributes)
const sampler = getActiveConfig().sampling.headSampler
const samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])
const { decision, traceState, attributes: attrs } = samplingDecision
const attributes = Object.assign({}, sanitisedAttrs, attrs)
const spanId = this.idGenerator.generateSpanId()
const parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined
const traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE
const spanContext = { traceId, spanId, traceFlags, traceState }
const span =
|
new SpanImpl({
|
attributes,
name,
onEnd: (span) => {
this.spanProcessor.onEnd(span as unknown as ReadableSpan)
},
resource: this.resource,
spanContext,
parentSpanId,
spanKind,
startTime: options.startTime,
})
//Do not get me started on the idiosyncrasies of the Otel JS libraries.
//@ts-ignore
this.spanProcessor.onStart(span, context)
return span
}
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, fn: F): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, options: SpanOptions, fn: F): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(
name: string,
options: SpanOptions,
context: Context,
fn: F
): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, ...args: unknown[]): ReturnType<F> {
const options = args.length > 1 ? (args[0] as SpanOptions) : undefined
const parentContext = args.length > 2 ? (args[1] as Context) : api_context.active()
const fn = args[args.length - 1] as F
const span = this.startSpan(name, options, parentContext)
const contextWithSpanSet = trace.setSpan(parentContext, span)
return api_context.with(contextWithSpanSet, fn, undefined, span)
}
}
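For context, a minimal sketch of how this tracer is typically consumed from application code once the WorkerTracerProvider (registered by init() in sdk.ts below) is in place; the tracer name and attribute key are placeholders:

import { trace, SpanStatusCode } from '@opentelemetry/api'

// Resolves to a WorkerTracer instance after the provider has been registered.
const tracer = trace.getTracer('example-app')

export async function handle(request: Request): Promise<Response> {
	return tracer.startActiveSpan('handle', async (span) => {
		try {
			span.setAttribute('http.method', request.method)
			return await fetch(request)
		} catch (error) {
			span.setStatus({ code: SpanStatusCode.ERROR })
			throw error
		} finally {
			span.end()
		}
	})
}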
|
src/tracer.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 0.8579999208450317
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'\nimport { SemanticAttributes } from '@opentelemetry/semantic-conventions'\nimport { passthroughGet, unwrap, wrap } from '../wrap.js'\nimport {\n\tgetParentContextFromHeaders,\n\tgatherIncomingCfAttributes,\n\tgatherRequestAttributes,\n\tgatherResponseAttributes,\n\tinstrumentFetcher,\n} from './fetch.js'",
"score": 0.8459076881408691
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\t\t\t\tacceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,\n\t\t\t},\n\t\t},\n\t\tpostProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),\n\t\tsampling: {\n\t\t\theadSampler,\n\t\t\ttailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),\n\t\t},\n\t\tservice: supplied.service,\n\t}",
"score": 0.839218020439148
},
{
"filename": "src/span.ts",
"retrieved_chunk": "}\nexport class SpanImpl implements Span, ReadableSpan {\n\tname: string\n\tprivate readonly _spanContext: SpanContext\n\tprivate readonly onEnd: OnSpanEnd\n\treadonly parentSpanId?: string\n\treadonly kind: SpanKind\n\treadonly attributes: Attributes\n\tstatus: SpanStatus = {\n\t\tcode: SpanStatusCode.UNSET,",
"score": 0.838865339756012
},
{
"filename": "src/span.ts",
"retrieved_chunk": "\tprivate _droppedEventsCount: number = 0\n\tprivate _droppedLinksCount: number = 0\n\tconstructor(init: SpanInit) {\n\t\tthis.name = init.name\n\t\tthis._spanContext = init.spanContext\n\t\tthis.parentSpanId = init.parentSpanId\n\t\tthis.kind = init.spanKind || SpanKind.INTERNAL\n\t\tthis.attributes = sanitizeAttributes(init.attributes)\n\t\tthis.startTime = getHrTime(init.startTime)\n\t\tthis.links = init.links || []",
"score": 0.8381459712982178
}
] |
typescript
|
new SpanImpl({
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
|
handler.queue = createQueueHandler(queuer, initialiser)
}
|
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
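A minimal sketch of wiring a Worker's exported handler through instrument; the package specifier, collector URL, header name, environment binding, and service name are assumptions made for illustration, not values taken from this repository:

import { instrument, ResolveConfigFn } from '@microlabs/otel-cf-workers' // assumed package specifier

const handler: ExportedHandler<{ OTEL_API_KEY: string }> = {
	async fetch(_request, _env, _ctx) {
		return new Response('ok')
	},
}

const config: ResolveConfigFn = (env: { OTEL_API_KEY: string }) => ({
	exporter: {
		url: 'https://collector.example.com/v1/traces', // placeholder endpoint
		headers: { 'x-api-key': env.OTEL_API_KEY }, // placeholder header and binding
	},
	service: { name: 'example-worker' }, // placeholder service name
})

export default instrument(handler, config)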
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.8128385543823242
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8109713196754456
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 0.8094029426574707
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 0.8089087009429932
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst fetchHandler: ProxyHandler<FetchFn> = {\n\t\tasync apply(target, thisArg, argArray: Parameters<FetchFn>) {\n\t\t\tconst request = argArray[0]\n\t\t\tconst config = initialiser(env, request)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst bound = target.bind(unwrap(thisArg))\n\t\t\t\treturn await api_context.with(context, executeDOFetch, undefined, bound, request, id)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 0.8073922395706177
}
] |
typescript
|
handler.queue = createQueueHandler(queuer, initialiser)
}
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
|
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
|
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
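parseConfig accepts either a ready-made Sampler or the ratio-based shorthand that createSampler expands. A minimal sketch of that shorthand combined with the exported tail samplers, assuming SamplingConfig is exported from types.js; the 0.25 ratio is purely illustrative:

import { SamplingConfig } from './types.js' // assumed to be exported
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'

// Head-sample a quarter of new traces, ignore remote parent decisions, and still
// keep any trace that was head-sampled or whose local root span errored.
const sampling: SamplingConfig = {
	headSampler: { acceptRemote: false, ratio: 0.25 },
	tailSampler: multiTailSampler([isHeadSampled, isRootErrorSpan]),
}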
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 0.8473687171936035
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(alarmFn, alarmHandler)\n}\nfunction instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {\n\tconst objHandler: ProxyHandler<DurableObject> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'fetch') {\n\t\t\t\tconst fetchFn = Reflect.get(target, prop)",
"score": 0.8293975591659546
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "import { instrumentEnv } from './env.js'\nimport { Initialiser, setConfig } from '../config.js'\nimport { exportSpans } from './common.js'\nimport { instrumentStorage } from './do-storage.js'\nimport { DOConstructorTrigger } from '../types.js'\ntype FetchFn = DurableObject['fetch']\ntype AlarmFn = DurableObject['alarm']\ntype Env = Record<string, unknown>\nfunction instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {\n\tconst stubHandler: ProxyHandler<typeof stub> = {",
"score": 0.8283259868621826
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t} finally {\n\t\t\t\texportSpans()\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\nfunction instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {\n\tif (!alarmFn) return undefined\n\tconst alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {",
"score": 0.8280598521232605
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t\treturn instrumentFetchFn(fetchFn, initialiser, env, state.id)\n\t\t\t} else if (prop === 'alarm') {\n\t\t\t\tconst alarmFn = Reflect.get(target, prop)\n\t\t\t\treturn instrumentAlarmFn(alarmFn, initialiser, env, state.id)\n\t\t\t} else {\n\t\t\t\tconst result = Reflect.get(target, prop)\n\t\t\t\tif (typeof result === 'function') {\n\t\t\t\t\tresult.bind(doObj)\n\t\t\t\t}\n\t\t\t\treturn result",
"score": 0.8255418539047241
}
] |
typescript
|
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
|
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
|
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
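instrumentDO wraps a Durable Object class the same way instrument wraps a handler. A minimal sketch of exporting an instrumented class, assuming the standard Durable Object constructor signature; the class body, collector URL, and service name are placeholders:

import { instrumentDO } from './sdk.js'

class CounterDO implements DurableObject {
	constructor(
		private readonly state: DurableObjectState,
		private readonly env: unknown
	) {}
	async fetch(_request: Request): Promise<Response> {
		return new Response('0')
	}
}

// The wrapped class is what the Durable Object binding should point at.
export const Counter = instrumentDO(CounterDO, {
	exporter: { url: 'https://collector.example.com/v1/traces' }, // placeholder endpoint
	service: { name: 'counter-do' }, // placeholder service name
})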
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8525301814079285
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8491162061691284
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 0.8329057693481445
},
{
"filename": "src/exporter.ts",
"retrieved_chunk": "import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'\nimport { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'\nimport { ExportResult, ExportResultCode } from '@opentelemetry/core'\nimport { SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { unwrap } from './wrap.js'\nexport interface OTLPExporterConfig {\n\turl: string\n\theaders?: Record<string, string>\n}\nconst defaultHeaders: Record<string, string> = {",
"score": 0.8325852155685425
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 0.8291878700256348
}
] |
typescript
|
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.
|
tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
|
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tconst samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])\n\t\tconst { decision, traceState, attributes: attrs } = samplingDecision\n\t\tconst attributes = Object.assign({}, sanitisedAttrs, attrs)\n\t\tconst spanId = this.idGenerator.generateSpanId()\n\t\tconst parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined\n\t\tconst traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE\n\t\tconst spanContext = { traceId, spanId, traceFlags, traceState }\n\t\tconst span = new SpanImpl({\n\t\t\tattributes,\n\t\t\tname,",
"score": 0.8170538544654846
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8143589496612549
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8124961853027344
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 0.8072227239608765
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\t\tonEnd: (span) => {\n\t\t\t\tthis.spanProcessor.onEnd(span as unknown as ReadableSpan)\n\t\t\t},\n\t\t\tresource: this.resource,\n\t\t\tspanContext,\n\t\t\tparentSpanId,\n\t\t\tspanKind,\n\t\t\tstartTime: options.startTime,\n\t\t})\n\t\t//Do not get me started on the idosyncracies of the Otel JS libraries.",
"score": 0.8030930757522583
}
] |
typescript
|
tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const
|
spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
|
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 0.8666049838066101
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 0.8633307218551636
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 0.84334397315979
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "import { context, trace, Tracer, TracerOptions, TracerProvider } from '@opentelemetry/api'\nimport { SpanProcessor } from '@opentelemetry/sdk-trace-base'\nimport { Resource } from '@opentelemetry/resources'\nimport { AsyncLocalStorageContextManager } from './context.js'\nimport { WorkerTracer } from './tracer.js'\n/**\n * Register this TracerProvider for use with the OpenTelemetry API.\n * Undefined values may be replaced with defaults, and\n * null values will be skipped.\n *",
"score": 0.8346774578094482
},
{
"filename": "src/instrumentation/common.ts",
"retrieved_chunk": "export async function exportSpans(tracker?: PromiseTracker) {\n\tconst tracer = trace.getTracer('export')\n\tif (tracer instanceof WorkerTracer) {\n\t\tawait scheduler.wait(1)\n\t\tif (tracker) {\n\t\t\tawait tracker.wait()\n\t\t}\n\t\tawait tracer.spanProcessor.forceFlush()\n\t} else {\n\t\tconsole.error('The global tracer is not of type WorkerTracer and can not export spans')",
"score": 0.829512357711792
}
] |
typescript
|
spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler:
|
Sampler | ParentRatioSamplingConfig): sampler is Sampler {
|
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 0.8473193049430847
},
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 0.8464154005050659
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 0.8331180810928345
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "import { context, trace, Tracer, TracerOptions, TracerProvider } from '@opentelemetry/api'\nimport { SpanProcessor } from '@opentelemetry/sdk-trace-base'\nimport { Resource } from '@opentelemetry/resources'\nimport { AsyncLocalStorageContextManager } from './context.js'\nimport { WorkerTracer } from './tracer.js'\n/**\n * Register this TracerProvider for use with the OpenTelemetry API.\n * Undefined values may be replaced with defaults, and\n * null values will be skipped.\n *",
"score": 0.8189574480056763
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tconst samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])\n\t\tconst { decision, traceState, attributes: attrs } = samplingDecision\n\t\tconst attributes = Object.assign({}, sanitisedAttrs, attrs)\n\t\tconst spanId = this.idGenerator.generateSpanId()\n\t\tconst parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined\n\t\tconst traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE\n\t\tconst spanContext = { traceId, spanId, traceFlags, traceState }\n\t\tconst span = new SpanImpl({\n\t\t\tattributes,\n\t\t\tname,",
"score": 0.8100060820579529
}
] |
typescript
|
Sampler | ParentRatioSamplingConfig): sampler is Sampler {
|
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config =
|
initialiser(env, request)
const context = setConfig(config)
try {
|
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 0.8999061584472656
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 0.8881999254226685
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.8784299492835999
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 0.8721792697906494
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tawait fn()\n\t\tspan.end()\n\t})\n}\nlet cold_start = true\nexport function executeFetchHandler(fetchFn: FetchHandler, [request, env, ctx]: FetchHandlerArgs): Promise<Response> {\n\tconst spanContext = getParentContextFromRequest(request)\n\tconst tracer = trace.getTracer('fetchHandler')\n\tconst attributes = {\n\t\t[SemanticAttributes.FAAS_TRIGGER]: 'http',",
"score": 0.8528221249580383
}
] |
typescript
|
initialiser(env, request)
const context = setConfig(config)
try {
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler
|
([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
|
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tconst samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])\n\t\tconst { decision, traceState, attributes: attrs } = samplingDecision\n\t\tconst attributes = Object.assign({}, sanitisedAttrs, attrs)\n\t\tconst spanId = this.idGenerator.generateSpanId()\n\t\tconst parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined\n\t\tconst traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE\n\t\tconst spanContext = { traceId, spanId, traceFlags, traceState }\n\t\tconst span = new SpanImpl({\n\t\t\tattributes,\n\t\t\tname,",
"score": 0.8184586763381958
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8163052797317505
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8162399530410767
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "import { TraceFlags, SpanStatusCode } from '@opentelemetry/api'\nimport { ReadableSpan } from '@opentelemetry/sdk-trace-base'\nexport interface LocalTrace {\n\treadonly traceId: string\n\treadonly localRootSpan: ReadableSpan\n\treadonly spans: ReadableSpan[]\n}\nexport type TailSampleFn = (traceInfo: LocalTrace) => boolean\nexport function multiTailSampler(samplers: TailSampleFn[]): TailSampleFn {\n\treturn (traceInfo) => {",
"score": 0.8100061416625977
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\t\tonEnd: (span) => {\n\t\t\t\tthis.spanProcessor.onEnd(span as unknown as ReadableSpan)\n\t\t\t},\n\t\t\tresource: this.resource,\n\t\t\tspanContext,\n\t\t\tparentSpanId,\n\t\t\tspanKind,\n\t\t\tstartTime: options.startTime,\n\t\t})\n\t\t//Do not get me started on the idosyncracies of the Otel JS libraries.",
"score": 0.8081529140472412
}
] |
typescript
|
([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
|
import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'
import { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { SpanExporter } from '@opentelemetry/sdk-trace-base'
import { unwrap } from './wrap.js'
export interface OTLPExporterConfig {
url: string
headers?: Record<string, string>
}
const defaultHeaders: Record<string, string> = {
accept: 'application/json',
'content-type': 'application/json',
}
export class OTLPExporter implements SpanExporter {
private headers: Record<string, string>
private url: string
constructor(config: OTLPExporterConfig) {
this.url = config.url
this.headers = Object.assign({}, defaultHeaders, config.headers)
}
export(items: any[], resultCallback: (result: ExportResult) => void): void {
this._export(items)
.then(() => {
resultCallback({ code: ExportResultCode.SUCCESS })
})
.catch((error: ExportServiceError) => {
resultCallback({ code: ExportResultCode.FAILED, error })
})
}
private _export(items: any[]): Promise<unknown> {
return new Promise<void>((resolve, reject) => {
try {
this.send(items, resolve, reject)
} catch (e) {
reject(e)
}
})
}
send(items: any[], onSuccess: () => void, onError: (error: OTLPExporterError) => void): void {
const exportMessage = createExportTraceServiceRequest(items, true)
const body = JSON.stringify(exportMessage)
const params: RequestInit = {
method: 'POST',
headers: this.headers,
body,
}
unwrap(fetch)(this.url, params)
.then((response) => {
if (response.ok) {
onSuccess()
} else {
onError(new OTLPExporterError(`Exporter received a statusCode: ${response.status}`))
}
})
|
.catch((error) => {
|
onError(new OTLPExporterError(`Exception during export: ${error.toString()}`, error.code, error.stack))
})
}
async shutdown(): Promise<void> {}
}
|
src/exporter.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 0.8256748914718628
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request)\n\t\t\tif (response.ok) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response",
"score": 0.825542688369751
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst fetchHandler: ProxyHandler<FetchFn> = {\n\t\tasync apply(target, thisArg, argArray: Parameters<FetchFn>) {\n\t\t\tconst request = argArray[0]\n\t\t\tconst config = initialiser(env, request)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst bound = target.bind(unwrap(thisArg))\n\t\t\t\treturn await api_context.with(context, executeDOFetch, undefined, bound, request, id)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 0.796797513961792
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.inprogressExports.set(traceId, promise)\n\t\t\tpromise.then((result) => {\n\t\t\t\tif (result.code === ExportResultCode.FAILED) {\n\t\t\t\t\tconsole.log('Error sending spans to exporter:', result.error)\n\t\t\t\t}\n\t\t\t\tthis.action(traceId, { actionName: 'exportDone' })\n\t\t\t\tthis.inprogressExports.delete(traceId)\n\t\t\t})\n\t\t}\n\t}",
"score": 0.7884572744369507
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 0.787909746170044
}
] |
typescript
|
.catch((error) => {
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env,
|
trigger) => {
|
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/config.ts",
"retrieved_chunk": "import { context } from '@opentelemetry/api'\nimport { ResolvedTraceConfig, Trigger } from './types.js'\nconst configSymbol = Symbol('Otel Workers Tracing Configuration')\nexport type Initialiser = (env: Record<string, unknown>, trigger: Trigger) => ResolvedTraceConfig\nexport function setConfig(config: ResolvedTraceConfig, ctx = context.active()) {\n\treturn ctx.setValue(configSymbol, config)\n}\nexport function getActiveConfig(): ResolvedTraceConfig {\n\tconst config = context.active().getValue(configSymbol) as ResolvedTraceConfig\n\treturn config",
"score": 0.7991199493408203
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nfunction instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {",
"score": 0.7986466884613037
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t} finally {\n\t\t\t\texportSpans()\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\nfunction instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {\n\tif (!alarmFn) return undefined\n\tconst alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {",
"score": 0.7952252626419067
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t\tname: orig_state.id.name,\n\t\t\t}\n\t\t\tconst constructorConfig = initialiser(orig_env, trigger)\n\t\t\tconst context = setConfig(constructorConfig)\n\t\t\tconst state = instrumentState(orig_state)\n\t\t\tconst env = instrumentEnv(orig_env)\n\t\t\tconst createDO = () => {\n\t\t\t\treturn new target(state, env)\n\t\t\t}\n\t\t\tconst doObj = api_context.with(context, createDO)",
"score": 0.7926036715507507
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.787176251411438
}
] |
typescript
|
trigger) => {
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
|
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
|
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 0.8500996828079224
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tconst samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])\n\t\tconst { decision, traceState, attributes: attrs } = samplingDecision\n\t\tconst attributes = Object.assign({}, sanitisedAttrs, attrs)\n\t\tconst spanId = this.idGenerator.generateSpanId()\n\t\tconst parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined\n\t\tconst traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE\n\t\tconst spanContext = { traceId, spanId, traceFlags, traceState }\n\t\tconst span = new SpanImpl({\n\t\t\tattributes,\n\t\t\tname,",
"score": 0.8444660902023315
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8393086791038513
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "} from '@opentelemetry/api'\nimport { SemanticAttributes } from '@opentelemetry/semantic-conventions'\nimport { Initialiser, getActiveConfig, setConfig } from '../config.js'\nimport { wrap } from '../wrap.js'\nimport { instrumentEnv } from './env.js'\nimport { exportSpans, proxyExecutionContext } from './common.js'\nimport { ResolvedTraceConfig } from '../types.js'\nexport type IncludeTraceContextFn = (request: Request) => boolean\nexport interface FetcherConfig {\n\tincludeTraceContext?: boolean | IncludeTraceContextFn",
"score": 0.8365342020988464
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8345382213592529
}
] |
typescript
|
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
|
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
|
const trigger: DOConstructorTrigger = {
|
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nexport function instrumentDO(doClass: DOClass, config: ConfigurationOption) {\n\tconst initialiser = createInitialiser(config)\n\treturn instrumentDOClass(doClass, initialiser)\n}\nexport { waitUntilTrace } from './instrumentation/fetch.js'",
"score": 0.857758641242981
},
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 0.8020938634872437
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}\nfunction instrumentCache(cache: Cache, cacheName: string): Cache {\n\tconst handler: ProxyHandler<typeof cache> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'delete' || prop === 'match' || prop === 'put') {",
"score": 0.7982714176177979
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.795231819152832
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\t\t\tspan.setAttribute('hasResult', !!result)\n\t\t\t\tspan.end()\n\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, fnHandler)\n}\nexport function instrumentKV(kv: KVNamespace, name: string): KVNamespace {\n\tconst kvHandler: ProxyHandler<KVNamespace> = {",
"score": 0.7935159206390381
}
] |
typescript
|
const trigger: DOConstructorTrigger = {
|
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const
|
result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
|
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 0.8951129913330078
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}\nfunction instrumentCache(cache: Cache, cacheName: string): Cache {\n\tconst handler: ProxyHandler<typeof cache> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'delete' || prop === 'match' || prop === 'put') {",
"score": 0.8544164896011353
},
{
"filename": "src/instrumentation/env.ts",
"retrieved_chunk": "const isDurableObject = (item?: unknown): item is DurableObjectNamespace => {\n\treturn !!(item as DurableObjectNamespace)?.idFromName\n}\nconst instrumentEnv = (env: Record<string, unknown>): Record<string, unknown> => {\n\tconst envHandler: ProxyHandler<Record<string, unknown>> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst item = Reflect.get(target, prop, receiver)\n\t\t\tif (isKVNamespace(item)) {\n\t\t\t\treturn instrumentKV(item, String(prop))\n\t\t\t} else if (isQueue(item)) {",
"score": 0.8488166332244873
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\tconst handler: ProxyHandler<typeof caches> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'default') {\n\t\t\t\tconst cache = target.default\n\t\t\t\treturn instrumentCache(cache, 'default')\n\t\t\t} else if (prop === 'open') {\n\t\t\t\tconst openFn = Reflect.get(target, prop).bind(target)\n\t\t\t\treturn instrumentOpen(openFn)\n\t\t\t} else {\n\t\t\t\treturn Reflect.get(target, prop)",
"score": 0.8362857103347778
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentKVFn(fn, name, operation)\n\t\t},\n\t}\n\treturn wrap(kv, kvHandler)\n}",
"score": 0.8327197432518005
}
] |
typescript
|
result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
|
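For orientation, a minimal usage sketch of the Durable Object instrumentation in the row above (not part of the dataset row itself): the package import path, class name and exporter URL are assumptions, not taken from the repo.

// Hypothetical sketch: wrapping a Durable Object class with instrumentDO (exported from src/sdk.ts).
// The import path, class name and exporter URL are illustrative assumptions.
import { instrumentDO } from '@microlabs/otel-cf-workers'

class Counter {
	constructor(
		private state: DurableObjectState,
		private env: Record<string, unknown>,
	) {}

	// fetch is proxied by instrumentFetchFn, so each call becomes a 'do.fetchHandler' span
	async fetch(_request: Request): Promise<Response> {
		const current = (await this.state.storage.get<number>('count')) ?? 0
		await this.state.storage.put('count', current + 1)
		return new Response(String(current + 1))
	}
}

// instrumentDOClass proxies the constructor, so state, env, fetch and alarm are all traced.
export const TracedCounter = instrumentDO(Counter, {
	exporter: { url: 'https://collector.example.com/v1/traces' },
	service: { name: 'counter-do' },
})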
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter
|
) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
|
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8409332036972046
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 0.8313568830490112
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8228397965431213
},
{
"filename": "src/exporter.ts",
"retrieved_chunk": "import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'\nimport { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'\nimport { ExportResult, ExportResultCode } from '@opentelemetry/core'\nimport { SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { unwrap } from './wrap.js'\nexport interface OTLPExporterConfig {\n\turl: string\n\theaders?: Record<string, string>\n}\nconst defaultHeaders: Record<string, string> = {",
"score": 0.8164165616035461
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tconst samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])\n\t\tconst { decision, traceState, attributes: attrs } = samplingDecision\n\t\tconst attributes = Object.assign({}, sanitisedAttrs, attrs)\n\t\tconst spanId = this.idGenerator.generateSpanId()\n\t\tconst parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined\n\t\tconst traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE\n\t\tconst spanContext = { traceId, spanId, traceFlags, traceState }\n\t\tconst span = new SpanImpl({\n\t\t\tattributes,\n\t\t\tname,",
"score": 0.81429523229599
}
] |
typescript
|
) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
|
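A usage sketch for the instrument() entry point defined in the sdk.ts row above; the handler body, endpoint URL and API-key header are placeholders, and the package import path is assumed.

// Hypothetical Worker entry point wired through instrument(); all values are placeholders.
import { instrument } from '@microlabs/otel-cf-workers'

const handler: ExportedHandler<Record<string, unknown>> = {
	async fetch(_request, _env, _ctx) {
		return new Response('hello')
	},
}

const config = {
	exporter: {
		url: 'https://otlp.example.com/v1/traces',
		headers: { 'x-api-key': 'REPLACE_ME' },
	},
	service: { name: 'demo-worker' },
	sampling: {
		// ParentRatioSamplingConfig: parseConfig above turns this into a ParentBasedSampler
		headSampler: { ratio: 0.25 },
	},
}

export default instrument(handler, config)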
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const
|
context = setConfig(config)
try {
|
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
|
src/instrumentation/do.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 0.9006607532501221
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 0.8889800906181335
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 0.8732801675796509
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 0.8712959289550781
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tawait fn()\n\t\tspan.end()\n\t})\n}\nlet cold_start = true\nexport function executeFetchHandler(fetchFn: FetchHandler, [request, env, ctx]: FetchHandlerArgs): Promise<Response> {\n\tconst spanContext = getParentContextFromRequest(request)\n\tconst tracer = trace.getTracer('fetchHandler')\n\tconst attributes = {\n\t\t[SemanticAttributes.FAAS_TRIGGER]: 'http',",
"score": 0.8531508445739746
}
] |
typescript
|
context = setConfig(config)
try {
|
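A caller-side sketch for the same row: once env is wrapped by instrumentEnv, a DurableObjectNamespace binding flows through instrumentDOBinding and instrumentBindingStub above, so the stub's fetch carries the do.namespace / do.id attributes. The binding name and URL are hypothetical.

// Hypothetical caller-side handler; COUNTER is an assumed Durable Object binding name.
interface Env {
	COUNTER: DurableObjectNamespace
}

const handler: ExportedHandler<Env> = {
	async fetch(_request, env, _ctx) {
		// env is proxied by instrumentEnv, so this namespace binding is already wrapped
		const id = env.COUNTER.idFromName('global')
		const stub = env.COUNTER.get(id) // instrumentBindingGet wraps the returned stub
		// stub.fetch is instrumented with durable_object attributes by instrumentFetcher
		return stub.fetch('https://do/counter/increment')
	},
}

// In practice this handler would itself be passed through instrument(), as in the earlier sketch.
export default handler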
import { trace, SpanOptions, SpanKind, Attributes, Exception, context as api_context } from '@opentelemetry/api'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans, proxyExecutionContext } from './common.js'
import { instrumentEnv } from './env.js'
import { unwrap, wrap } from '../wrap.js'
type QueueHandler = ExportedHandlerQueueHandler<unknown, unknown>
export type QueueHandlerArgs = Parameters<QueueHandler>
const traceIdSymbol = Symbol('traceId')
class MessageStatusCount {
succeeded = 0
failed = 0
readonly total: number
constructor(total: number) {
this.total = total
}
ack() {
this.succeeded = this.succeeded + 1
}
ackRemaining() {
this.succeeded = this.total - this.failed
}
retry() {
this.failed = this.failed + 1
}
retryRemaining() {
this.failed = this.total - this.succeeded
}
toAttributes(): Attributes {
return {
'queue.messages_count': this.total,
'queue.messages_success': this.succeeded,
'queue.messages_failed': this.failed,
'queue.batch_success': this.succeeded === this.total,
}
}
}
const addEvent = (name: string, msg?: Message) => {
const attrs: Attributes = {}
if (msg) {
attrs['queue.message_id'] = msg.id
attrs['queue.message_timestamp'] = msg.timestamp.toISOString()
}
trace.getActiveSpan()?.addEvent(name, attrs)
}
const proxyQueueMessage = <Q>(msg: Message<Q>, count: MessageStatusCount): Message<Q> => {
const msgHandler: ProxyHandler<Message<Q>> = {
get: (target, prop) => {
if (prop === 'ack') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('messageAck', msg)
count.ack()
//TODO: handle errors
Reflect.apply(fnTarget, msg, [])
},
})
} else if (prop === 'retry') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('messageRetry', msg)
count.retry()
//TODO: handle errors
const result = Reflect.apply(fnTarget, msg, [])
return result
},
})
} else {
return Reflect.get(target, prop, msg)
}
},
}
return wrap(msg, msgHandler)
}
const proxyMessageBatch = <E, Q>(batch: MessageBatch, count: MessageStatusCount) => {
const batchHandler: ProxyHandler<MessageBatch> = {
get: (target, prop) => {
if (prop === 'messages') {
const messages = Reflect.get(target, prop)
const messagesHandler: ProxyHandler<MessageBatch['messages']> = {
get: (target, prop) => {
if (typeof prop === 'string' && !isNaN(parseInt(prop))) {
const message = Reflect.get(target, prop)
return proxyQueueMessage(message, count)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(messages, messagesHandler)
} else if (prop === 'ackAll') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('ackAll')
count.ackRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
} else if (prop === 'retryAll') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('retryAll')
count.retryRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
}
return Reflect.get(target, prop)
},
}
return wrap(batch, batchHandler)
}
export function executeQueueHandler(queueFn: QueueHandler, [batch, env, ctx]: QueueHandlerArgs): Promise<void> {
const count = new MessageStatusCount(batch.messages.length)
batch = proxyMessageBatch(batch, count)
const tracer = trace.getTracer('queueHandler')
const options: SpanOptions = {
attributes: {
'queue.name': batch.queue,
},
kind: SpanKind.CONSUMER,
}
const promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {
const traceId = span.spanContext().traceId
api_context.active().setValue(traceIdSymbol, traceId)
try {
const result = await queueFn(batch, env, ctx)
span.setAttribute('queue.implicitly_acked', count.total - count.succeeded - count.failed)
count.ackRemaining()
span.setAttributes(count.toAttributes())
span.end()
return result
} catch (error) {
span.recordException(error as Exception)
span.setAttribute('queue.implicitly_retried', count.total - count.succeeded - count.failed)
count.retryRemaining()
span.end()
throw error
}
})
return promise
}
export function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {
const queueHandler: ProxyHandler<QueueHandler> = {
async apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {
const [batch, orig_env, orig_ctx] = argArray
const config = initialiser(orig_env as Record<string, unknown>, batch)
const env = instrumentEnv(orig_env as Record<string, unknown>)
const { ctx, tracker } = proxyExecutionContext(orig_ctx)
const context = setConfig(config)
try {
const args: QueueHandlerArgs = [batch, env, ctx]
return await api_context.with(context, executeQueueHandler, undefined, target, args)
} catch (error) {
throw error
} finally {
orig_ctx.waitUntil(exportSpans(tracker))
}
},
}
return wrap(queueFn, queueHandler)
}
function instrumentQueueSend(fn: Queue<unknown>['send'], name: string): Queue<unknown>['send'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['send']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'send')
await Reflect.apply(target
|
, unwrap(thisArg), argArray)
span.end()
})
},
}
|
return wrap(fn, handler)
}
function instrumentQueueSendBatch(fn: Queue<unknown>['sendBatch'], name: string): Queue<unknown>['sendBatch'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['sendBatch']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSendBatch: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'sendBatch')
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
return wrap(fn, handler)
}
export function instrumentQueueSender(queue: Queue<unknown>, name: string) {
const queueHandler: ProxyHandler<Queue<unknown>> = {
get: (target, prop) => {
if (prop === 'send') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSend(sendFn, name)
} else if (prop === 'sendBatch') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSendBatch(sendFn, name)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(queue, queueHandler)
}
|
src/instrumentation/queue.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\t\t\tattributes: {\n\t\t\t\t\tbinding_type: 'KV',\n\t\t\t\t\tkv_namespace: name,\n\t\t\t\t\toperation,\n\t\t\t\t},\n\t\t\t}\n\t\t\treturn tracer.startActiveSpan(`kv:${name}:${operation}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tconst extraAttrs = KVAttributes[operation] ? KVAttributes[operation](argArray, result) : {}\n\t\t\t\tspan.setAttributes(extraAttrs)",
"score": 0.8423916101455688
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\t'http.url': argArray[0].url ? sanitiseURL(argArray[0].url) : undefined,\n\t\t\t\t'cache.operation': op,\n\t\t\t}\n\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes }\n\t\t\treturn tracer.startActiveSpan(`cache:${cacheName}:${op}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tif (op === 'match') {\n\t\t\t\t\tspan.setAttribute('cache.hit', !result)\n\t\t\t\t}\n\t\t\t\tspan.end()",
"score": 0.8382392525672913
},
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\t\t\treturn tracer.startActiveSpan(`do:storage:${operation}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tconst extraAttrs = StorageAttributes[operation] ? StorageAttributes[operation](argArray, result) : {}\n\t\t\t\tspan.setAttributes(extraAttrs)\n\t\t\t\tspan.setAttribute('hasResult', !!result)\n\t\t\t\tspan.end()\n\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}",
"score": 0.8368250131607056
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 0.8284573554992676
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\t\t\t\tset: (h, k, v) => h.set(k, typeof v === 'string' ? v : String(v)),\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t\tspan.setAttributes(gatherRequestAttributes(request))\n\t\t\t\tif (request.cf) span.setAttributes(gatherOutgoingCfAttributes(request.cf))\n\t\t\t\tconst response: Response = await Reflect.apply(target, thisArg, [request])\n\t\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\t\tspan.end()\n\t\t\t\treturn response\n\t\t\t})",
"score": 0.8239879012107849
}
] |
typescript
|
, unwrap(thisArg), argArray)
span.end()
})
},
}
|
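A consumer-side sketch for the queue instrumentation above: ack() and retry() calls on each message surface as 'messageAck' / 'messageRetry' span events via proxyQueueMessage. The processing logic, binding types and import path are assumptions.

// Hypothetical queue consumer; processMessage is illustrative, not from the repo.
import { instrument } from '@microlabs/otel-cf-workers'

async function processMessage(_body: string): Promise<void> {
	// placeholder for real work
}

const handler: ExportedHandler<Record<string, unknown>, string> = {
	async queue(batch, _env, _ctx) {
		for (const msg of batch.messages) {
			try {
				await processMessage(msg.body)
				msg.ack() // recorded as a 'messageAck' event on the queueHandler span
			} catch {
				msg.retry() // recorded as a 'messageRetry' event
			}
		}
	},
}

export default instrument(handler, {
	exporter: { url: 'https://otlp.example.com/v1/traces' },
	service: { name: 'queue-consumer' },
})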
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
|
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
|
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8307614326477051
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 0.8296710252761841
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8269666433334351
},
{
"filename": "src/exporter.ts",
"retrieved_chunk": "import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'\nimport { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'\nimport { ExportResult, ExportResultCode } from '@opentelemetry/core'\nimport { SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { unwrap } from './wrap.js'\nexport interface OTLPExporterConfig {\n\turl: string\n\theaders?: Record<string, string>\n}\nconst defaultHeaders: Record<string, string> = {",
"score": 0.8128105401992798
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tconst samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])\n\t\tconst { decision, traceState, attributes: attrs } = samplingDecision\n\t\tconst attributes = Object.assign({}, sanitisedAttrs, attrs)\n\t\tconst spanId = this.idGenerator.generateSpanId()\n\t\tconst parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined\n\t\tconst traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE\n\t\tconst spanContext = { traceId, spanId, traceFlags, traceState }\n\t\tconst span = new SpanImpl({\n\t\t\tattributes,\n\t\t\tname,",
"score": 0.8111799359321594
}
] |
typescript
|
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
|
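The function form of ConfigurationOption (ResolveConfigFn above) lets exporter settings come from env bindings at invocation time; the binding names and import path in this sketch are hypothetical.

// Hypothetical per-environment config resolver; OTLP_URL / OTLP_API_KEY are assumed binding names.
import { instrument, ResolveConfigFn } from '@microlabs/otel-cf-workers'

interface Env {
	OTLP_URL: string
	OTLP_API_KEY: string
}

const resolveConfig: ResolveConfigFn = (env: Env, _trigger) => ({
	exporter: {
		url: env.OTLP_URL,
		headers: { 'x-api-key': env.OTLP_API_KEY },
	},
	service: { name: 'demo-worker' },
})

const handler: ExportedHandler<Env> = {
	async fetch() {
		return new Response('ok')
	},
}

export default instrument(handler, resolveConfig)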
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
|
return (env, trigger) => {
|
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/config.ts",
"retrieved_chunk": "import { context } from '@opentelemetry/api'\nimport { ResolvedTraceConfig, Trigger } from './types.js'\nconst configSymbol = Symbol('Otel Workers Tracing Configuration')\nexport type Initialiser = (env: Record<string, unknown>, trigger: Trigger) => ResolvedTraceConfig\nexport function setConfig(config: ResolvedTraceConfig, ctx = context.active()) {\n\treturn ctx.setValue(configSymbol, config)\n}\nexport function getActiveConfig(): ResolvedTraceConfig {\n\tconst config = context.active().getValue(configSymbol) as ResolvedTraceConfig\n\treturn config",
"score": 0.8149979114532471
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t} finally {\n\t\t\t\texportSpans()\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\nfunction instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {\n\tif (!alarmFn) return undefined\n\tconst alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {",
"score": 0.8090935349464417
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nfunction instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {",
"score": 0.8075236082077026
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t\tname: orig_state.id.name,\n\t\t\t}\n\t\t\tconst constructorConfig = initialiser(orig_env, trigger)\n\t\t\tconst context = setConfig(constructorConfig)\n\t\t\tconst state = instrumentState(orig_state)\n\t\t\tconst env = instrumentEnv(orig_env)\n\t\t\tconst createDO = () => {\n\t\t\t\treturn new target(state, env)\n\t\t\t}\n\t\t\tconst doObj = api_context.with(context, createDO)",
"score": 0.8041651844978333
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8037738800048828
}
] |
typescript
|
return (env, trigger) => {
|
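parseConfig above defaults postProcessor to the identity function; here is a sketch of a custom PostProcessorFn that drops noisy spans before export. The span-name prefix follows the `cache:` naming seen in src/instrumentation/cache.ts; everything else is illustrative.

// Hypothetical postProcessor: filter out cache spans before export.
// PostProcessorFn is (spans: ReadableSpan[]) => ReadableSpan[] per src/types.ts.
import { ReadableSpan } from '@opentelemetry/sdk-trace-base'

const dropCacheSpans = (spans: ReadableSpan[]): ReadableSpan[] =>
	spans.filter((span) => !span.name.startsWith('cache:'))

const config = {
	exporter: { url: 'https://otlp.example.com/v1/traces' },
	service: { name: 'demo-worker' },
	postProcessor: dropCacheSpans,
}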
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
|
service: supplied.service,
}
|
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8383843302726746
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tconst samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])\n\t\tconst { decision, traceState, attributes: attrs } = samplingDecision\n\t\tconst attributes = Object.assign({}, sanitisedAttrs, attrs)\n\t\tconst spanId = this.idGenerator.generateSpanId()\n\t\tconst parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined\n\t\tconst traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE\n\t\tconst spanContext = { traceId, spanId, traceFlags, traceState }\n\t\tconst span = new SpanImpl({\n\t\t\tattributes,\n\t\t\tname,",
"score": 0.8344578742980957
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tif (options.root) {\n\t\t\tcontext = trace.deleteSpan(context)\n\t\t}\n\t\tconst parentSpan = trace.getSpan(context)\n\t\tconst parentSpanContext = parentSpan?.spanContext()\n\t\tconst hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)\n\t\tconst traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()\n\t\tconst spanKind = options.kind || SpanKind.INTERNAL\n\t\tconst sanitisedAttrs = sanitizeAttributes(options.attributes)\n\t\tconst sampler = getActiveConfig().sampling.headSampler",
"score": 0.8308507800102234
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 0.8250670433044434
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 0.8122071623802185
}
] |
typescript
|
service: supplied.service,
}
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher
|
= unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
|
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
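// Usage sketch for the instrument() wrapper above: a plain Worker handler wrapped with a static
// TraceConfig. The exporter field names, endpoint URL, and service name are illustrative
// assumptions only; per isSpanExporter above, a SpanExporter instance is also accepted.
import { instrument } from './sdk.js'
import type { TraceConfig } from './types.js'

const config: TraceConfig = {
	exporter: { url: 'https://collector.example.com/v1/traces' },
	service: { name: 'greetings-worker' },
}

const handler: ExportedHandler = {
	async fetch(_req: Request, _env: unknown, _ctx: ExecutionContext): Promise<Response> {
		return new Response('hello world')
	},
}

export default instrument(handler, config)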
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 0.8412199020385742
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(alarmFn, alarmHandler)\n}\nfunction instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {\n\tconst objHandler: ProxyHandler<DurableObject> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'fetch') {\n\t\t\t\tconst fetchFn = Reflect.get(target, prop)",
"score": 0.8254914283752441
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t} finally {\n\t\t\t\texportSpans()\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\nfunction instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {\n\tif (!alarmFn) return undefined\n\tconst alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {",
"score": 0.8240920305252075
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "import { instrumentEnv } from './env.js'\nimport { Initialiser, setConfig } from '../config.js'\nimport { exportSpans } from './common.js'\nimport { instrumentStorage } from './do-storage.js'\nimport { DOConstructorTrigger } from '../types.js'\ntype FetchFn = DurableObject['fetch']\ntype AlarmFn = DurableObject['alarm']\ntype Env = Record<string, unknown>\nfunction instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {\n\tconst stubHandler: ProxyHandler<typeof stub> = {",
"score": 0.8238855600357056
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\t//@ts-ignore\n\tglobalThis.caches = wrap(caches, handler)\n}\nexport function instrumentGlobalCache() {\n\treturn _instrumentGlobalCache()\n}",
"score": 0.821286141872406
}
] |
typescript
|
= unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export
|
function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
|
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
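// Because createInitialiser above also accepts a function, configuration can be resolved per
// trigger from the environment. The OTLP_URL and SAMPLE_RATIO bindings are hypothetical, and the
// headSampler object follows the ParentRatioSamplingConfig branch handled by createSampler.
import { instrument, ResolveConfigFn } from './sdk.js'

interface Env {
	OTLP_URL: string // hypothetical binding
	SAMPLE_RATIO: string // hypothetical binding, e.g. "0.25"
}

const resolveConfig: ResolveConfigFn = (env: Env, _trigger) => ({
	exporter: { url: env.OTLP_URL },
	service: { name: 'sampled-worker' },
	sampling: {
		headSampler: { ratio: Number(env.SAMPLE_RATIO), acceptRemote: false },
	},
})

const handler: ExportedHandler<Env> = {
	async fetch(): Promise<Response> {
		return new Response('ok')
	},
}

export default instrument(handler, resolveConfig)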
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(doObj, objHandler)\n}\nexport function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {\n\tconst classHandler: ProxyHandler<DOClass> = {\n\t\tconstruct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {\n\t\t\tconst trigger: DOConstructorTrigger = {\n\t\t\t\tid: orig_state.id.toString(),",
"score": 0.8824636936187744
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(alarmFn, alarmHandler)\n}\nfunction instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {\n\tconst objHandler: ProxyHandler<DurableObject> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'fetch') {\n\t\t\t\tconst fetchFn = Reflect.get(target, prop)",
"score": 0.8535492420196533
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "import { instrumentEnv } from './env.js'\nimport { Initialiser, setConfig } from '../config.js'\nimport { exportSpans } from './common.js'\nimport { instrumentStorage } from './do-storage.js'\nimport { DOConstructorTrigger } from '../types.js'\ntype FetchFn = DurableObject['fetch']\ntype AlarmFn = DurableObject['alarm']\ntype Env = Record<string, unknown>\nfunction instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {\n\tconst stubHandler: ProxyHandler<typeof stub> = {",
"score": 0.832856297492981
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t\treturn instrumentFetchFn(fetchFn, initialiser, env, state.id)\n\t\t\t} else if (prop === 'alarm') {\n\t\t\t\tconst alarmFn = Reflect.get(target, prop)\n\t\t\t\treturn instrumentAlarmFn(alarmFn, initialiser, env, state.id)\n\t\t\t} else {\n\t\t\t\tconst result = Reflect.get(target, prop)\n\t\t\t\tif (typeof result === 'function') {\n\t\t\t\t\tresult.bind(doObj)\n\t\t\t\t}\n\t\t\t\treturn result",
"score": 0.8325251340866089
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\treturn instrumentDurableObject(doObj, initialiser, env, state)\n\t\t},\n\t}\n\treturn wrap(doClass, classHandler)\n}",
"score": 0.831623375415802
}
] |
typescript
|
function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
|
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
|
return instrumentDOClass(doClass, initialiser)
}
|
export { waitUntilTrace } from './instrumentation/fetch.js'
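// Durable Object sketch: instrumentDO above wraps the class constructor via instrumentDOClass.
// The class body, binding name, and exporter/service values are illustrative assumptions.
import { instrumentDO } from './sdk.js'
import type { TraceConfig } from './types.js'

const config: TraceConfig = {
	exporter: { url: 'https://collector.example.com/v1/traces' },
	service: { name: 'counter-do' },
}

class CounterDO implements DurableObject {
	constructor(private state: DurableObjectState, private env: unknown) {}

	async fetch(_request: Request): Promise<Response> {
		const value = ((await this.state.storage.get<number>('count')) ?? 0) + 1
		await this.state.storage.put('count', value)
		return new Response(String(value))
	}
}

// The wrapped class is what the wrangler durable_objects binding should point at.
export const Counter = instrumentDO(CounterDO, config)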
|
src/sdk.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(doObj, objHandler)\n}\nexport function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {\n\tconst classHandler: ProxyHandler<DOClass> = {\n\t\tconstruct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {\n\t\t\tconst trigger: DOConstructorTrigger = {\n\t\t\t\tid: orig_state.id.toString(),",
"score": 0.9008764028549194
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\treturn instrumentDurableObject(doObj, initialiser, env, state)\n\t\t},\n\t}\n\treturn wrap(doClass, classHandler)\n}",
"score": 0.8611741662025452
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(alarmFn, alarmHandler)\n}\nfunction instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {\n\tconst objHandler: ProxyHandler<DurableObject> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'fetch') {\n\t\t\t\tconst fetchFn = Reflect.get(target, prop)",
"score": 0.840584397315979
},
{
"filename": "src/instrumentation/env.ts",
"retrieved_chunk": "import { wrap } from '../wrap.js'\nimport { instrumentDOBinding } from './do.js'\nimport { instrumentKV } from './kv.js'\nimport { instrumentQueueSender } from './queue.js'\nconst isKVNamespace = (item?: unknown): item is KVNamespace => {\n\treturn !!(item as KVNamespace)?.getWithMetadata\n}\nconst isQueue = (item?: unknown): item is Queue<unknown> => {\n\treturn !!(item as Queue<unknown>)?.sendBatch\n}",
"score": 0.8312267661094666
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "import { instrumentEnv } from './env.js'\nimport { Initialiser, setConfig } from '../config.js'\nimport { exportSpans } from './common.js'\nimport { instrumentStorage } from './do-storage.js'\nimport { DOConstructorTrigger } from '../types.js'\ntype FetchFn = DurableObject['fetch']\ntype AlarmFn = DurableObject['alarm']\ntype Env = Record<string, unknown>\nfunction instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {\n\tconst stubHandler: ProxyHandler<typeof stub> = {",
"score": 0.8220100402832031
}
] |
typescript
|
return instrumentDOClass(doClass, initialiser)
}
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
|
instructionSet: InstructionSet,
program: AssembledProgram
) => {
|
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
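// Standalone sketch of what the two command regexes above accept; the sample source lines are
// made up, and only the raw capture groups are printed (parseNumber handles actual conversion).
const originRegex = /origin\s+((?:0x)?[a-f0-9]+)/;
const constantRegex = /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/;

const samples = ["origin 0x100", "constant 0xfff", "constant loop_start"];

for (const line of samples) {
  const origin = originRegex.exec(line);
  const constant = constantRegex.exec(line);
  if (origin) {
    console.log(`origin -> ${origin[1]}`); // "0x100": sets currentAddress
  } else if (constant) {
    const [, literal, label] = constant;
    console.log(
      literal !== undefined
        ? `constant literal -> ${literal}` // emitted directly as a 12-bit word
        : `constant label -> ${label}` // resolved against matchedLabels later
    );
  }
}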
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 0.8384814262390137
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 0.8287463188171387
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " if (lineLabel) {\n output += `\\n${lineLabel.name}:\\n`;\n }\n output += ` ${buildDisassembledInstructionString(\n instruction,\n immediateLabel\n )}\\n`;\n address += 1;\n }\n return output;",
"score": 0.8265746831893921
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8264648914337158
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " return;\n }\n const sections = line.split(\";\");\n if (sections.length != 2) {\n log(\n \"Unexpected semicolon. Does this instruction have an output?\",\n lineNumber\n );\n return;\n }",
"score": 0.8256099820137024
}
] |
typescript
|
instructionSet: InstructionSet,
program: AssembledProgram
) => {
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
|
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
|
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
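// Reduced sketch of the two-phase label bookkeeping in parseAsmLine above: a label on a line with
// no instruction is queued in unmatchedLabels, and the next matched instruction claims every
// queued label. Data shapes are simplified from AssembledProgram (instructionIndex omitted).
interface PendingLabel { label: string; lineNumber: number }
const matchedLabels: Record<string, { lineNumber: number; address: number }> = {};
let unmatchedLabels: PendingLabel[] = [];

const onLabelOnlyLine = (label: string, lineNumber: number) => {
  unmatchedLabels.push({ label, lineNumber });
};

const onInstructionLine = (address: number, lineNumber: number) => {
  for (const pending of unmatchedLabels) {
    matchedLabels[pending.label] = { lineNumber, address };
  }
  unmatchedLabels = [];
};

onLabelOnlyLine("loop_start", 3);
onInstructionLine(0x10, 4);
console.log(matchedLabels); // { loop_start: { lineNumber: 4, address: 16 } }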
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8355630040168762
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8259110450744629
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " return;\n }\n const sections = line.split(\";\");\n if (sections.length != 2) {\n log(\n \"Unexpected semicolon. Does this instruction have an output?\",\n lineNumber\n );\n return;\n }",
"score": 0.8226152658462524
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " type: \"immediate\",\n regex: cleanAndFinishInstructionRegex(instructionLine),\n immediate: {\n bitCount,\n stringIndex: index,\n stringLength: matchString.length,\n },\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8149986267089844
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n } else {\n // This is a literal\n const sortableOpcode = buildSortableOpcode(opcodeString, 0);\n config.instructions.push({\n type: \"literal\",\n regex: cleanAndFinishInstructionRegex(originalInstruction),\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8107741475105286
}
] |
typescript
|
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
|
import { Context, Span } from '@opentelemetry/api'
import { ReadableSpan, SpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { Action, State, stateMachine } from 'ts-checked-fsm'
import { getActiveConfig } from './config.js'
import { TailSampleFn } from './sampling.js'
import { PostProcessorFn } from './types.js'
type CompletedTrace = {
traceId: string
localRootSpan: ReadableSpan
completedSpans: ReadableSpan[]
}
type InProgressTrace = {
inProgressSpanIds: Set<string>
} & CompletedTrace
type InitialState = State<'not_started'>
type InProgressTraceState = State<'in_progress', InProgressTrace>
type TraceCompleteState = State<'trace_complete', CompletedTrace>
type ExportingState = State<'exporting', { promise: Promise<ExportResult> }>
type DoneState = State<'done'>
type StartExportArguments = {
exporter: SpanExporter
tailSampler: TailSampleFn
postProcessor: PostProcessorFn
}
type StartSpanAction = Action<'startSpan', { span: Span }>
type EndSpanAction = Action<'endSpan', { span: ReadableSpan }>
type StartExportAction = Action<'startExport', { args: StartExportArguments }>
function newTrace(currentState: InitialState, { span }: StartSpanAction): InProgressTraceState {
const spanId = span.spanContext().spanId
return {
...currentState,
stateName: 'in_progress',
traceId: span.spanContext().traceId,
localRootSpan: span as unknown as ReadableSpan,
completedSpans: [] as ReadableSpan[],
inProgressSpanIds: new Set([spanId]),
} as const
}
function newSpan(currentState: InProgressTraceState, { span }: StartSpanAction): InProgressTraceState {
const spanId = span.spanContext().spanId
currentState.inProgressSpanIds.add(spanId)
return { ...currentState }
}
function endSpan(
currentState: InProgressTraceState,
{ span }: EndSpanAction
): InProgressTraceState | TraceCompleteState {
currentState.completedSpans.push(span)
currentState.inProgressSpanIds.delete(span.spanContext().spanId)
if (currentState.inProgressSpanIds.size === 0) {
return {
stateName: 'trace_complete',
traceId: currentState.traceId,
localRootSpan: currentState.localRootSpan,
completedSpans: currentState.completedSpans,
} as const
} else {
return { ...currentState }
}
}
function startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {
const { exporter, tailSampler, postProcessor } = args
const { traceId, localRootSpan, completedSpans: spans } = currentState
const shouldExport
|
= tailSampler({ traceId, localRootSpan, spans })
if (shouldExport) {
|
const exportSpans = postProcessor(spans)
const promise = new Promise<ExportResult>((resolve) => {
exporter.export(exportSpans, resolve)
})
return { stateName: 'exporting', promise }
} else {
return { stateName: 'done' }
}
}
const { nextState } = stateMachine()
.state('not_started')
.state<'in_progress', InProgressTraceState>('in_progress')
.state<'trace_complete', TraceCompleteState>('trace_complete')
.state<'exporting', ExportingState>('exporting')
.state('done')
.transition('not_started', 'in_progress')
.transition('in_progress', 'in_progress')
.transition('in_progress', 'trace_complete')
.transition('trace_complete', 'exporting')
.transition('trace_complete', 'done')
.transition('exporting', 'done')
.action<'startSpan', StartSpanAction>('startSpan')
.action<'endSpan', EndSpanAction>('endSpan')
.action<'startExport', StartExportAction>('startExport')
.action('exportDone')
.actionHandler('not_started', 'startSpan', newTrace)
.actionHandler('in_progress', 'startSpan', newSpan)
.actionHandler('in_progress', 'endSpan', endSpan)
.actionHandler('trace_complete', 'startExport', startExport)
.actionHandler('exporting', 'exportDone', (_c, _a) => {
return { stateName: 'done' } as const
})
.done()
type AnyTraceState = Parameters<typeof nextState>[0]
type AnyTraceAction = Parameters<typeof nextState>[1]
export class BatchTraceSpanProcessor implements SpanProcessor {
private traces: Map<string, AnyTraceState> = new Map()
private inprogressExports: Map<string, Promise<ExportResult>> = new Map()
private action(traceId: string, action: AnyTraceAction): AnyTraceState {
const state = this.traces.get(traceId) || { stateName: 'not_started' }
const newState = nextState(state, action)
if (newState.stateName === 'done') {
this.traces.delete(traceId)
} else {
this.traces.set(traceId, newState)
}
return newState
}
private export(traceId: string) {
const { exporter, sampling, postProcessor } = getActiveConfig()
const exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }
const newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })
if (newState.stateName === 'exporting') {
const promise = newState.promise
this.inprogressExports.set(traceId, promise)
promise.then((result) => {
if (result.code === ExportResultCode.FAILED) {
console.log('Error sending spans to exporter:', result.error)
}
this.action(traceId, { actionName: 'exportDone' })
this.inprogressExports.delete(traceId)
})
}
}
onStart(span: Span, _parentContext: Context): void {
const traceId = span.spanContext().traceId
this.action(traceId, { actionName: 'startSpan', span })
}
onEnd(span: ReadableSpan): void {
const traceId = span.spanContext().traceId
const state = this.action(traceId, { actionName: 'endSpan', span })
if (state.stateName === 'trace_complete') {
this.export(traceId)
}
}
async forceFlush(): Promise<void> {
await Promise.allSettled(this.inprogressExports.values())
}
async shutdown(): Promise<void> {}
}
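// Minimal model of the in_progress -> trace_complete rule above, decoupled from OpenTelemetry
// types: a trace is complete once its set of in-progress span ids empties, at which point the
// completed spans would be handed to the tail sampler and exporter.
const inProgressSpanIds = new Set<string>()
let completedSpans = 0

function startSpan(spanId: string) {
	inProgressSpanIds.add(spanId)
}

function endSpan(spanId: string): 'in_progress' | 'trace_complete' {
	inProgressSpanIds.delete(spanId)
	completedSpans += 1
	return inProgressSpanIds.size === 0 ? 'trace_complete' : 'in_progress'
}

startSpan('root')
startSpan('child')
console.log(endSpan('child')) // 'in_progress'
console.log(endSpan('root')) // 'trace_complete'
console.log(completedSpans) // 2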
|
src/spanprocessor.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/common.ts",
"retrieved_chunk": "export async function exportSpans(tracker?: PromiseTracker) {\n\tconst tracer = trace.getTracer('export')\n\tif (tracer instanceof WorkerTracer) {\n\t\tawait scheduler.wait(1)\n\t\tif (tracker) {\n\t\t\tawait tracker.wait()\n\t\t}\n\t\tawait tracer.spanProcessor.forceFlush()\n\t} else {\n\t\tconsole.error('The global tracer is not of type WorkerTracer and can not export spans')",
"score": 0.8067225813865662
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 0.8022093176841736
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 0.8016155958175659
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\t\t\t? headSampleConf\n\t\t\t: createSampler(headSampleConf)\n\t\t: new AlwaysOnSampler()\n\treturn {\n\t\texporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),\n\t\tfetch: {\n\t\t\tincludeTraceContext: supplied.fetch?.includeTraceContext ?? true,\n\t\t},\n\t\thandlers: {\n\t\t\tfetch: {",
"score": 0.8013482093811035
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\t\t\t\tacceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,\n\t\t\t},\n\t\t},\n\t\tpostProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),\n\t\tsampling: {\n\t\t\theadSampler,\n\t\t\ttailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),\n\t\t},\n\t\tservice: supplied.service,\n\t}",
"score": 0.797799289226532
}
] |
typescript
|
= tailSampler({ traceId, localRootSpan, spans })
if (shouldExport) {
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
|
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
|
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 0.9025119543075562
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "const archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst build = async () => {\n const instructionSet = await readArch(archPath);\n const sortedInstructions = instructionSet.instructions.sort(\n (a, b) => a.sortableOpcode - b.sortableOpcode\n );\n const buffer = readFileSync(inputFile);\n const outputString = parseBinaryBuffer(buffer, sortedInstructions);",
"score": 0.8845646381378174
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": "};\n/**\n * Reads and parses the BASS arch file\n * @param path The path of the arch file\n * @returns The InstructionSet resulting from parsing the arch file\n */\nexport const readArch = async (path: string): Promise<InstructionSet> => {\n const instructionSet: InstructionSet = {\n instructions: [],\n };",
"score": 0.8553744554519653
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "import { readFileSync, writeFileSync } from \"fs\";\nimport path from \"path\";\nimport { argv } from \"process\";\nimport { parseBinaryBuffer } from \"./lib/disassembly\";\nimport { readArch } from \"./lib/fs\";\nif (argv.length != 4) {\n console.log(`Received ${argv.length - 2} arguments. Expected 2\\n`);\n console.log(\"Usage: node disassembler.js [input.bin] [output.asm]\");\n process.exit(1);\n}",
"score": 0.8453288674354553
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": "import fs from \"fs\";\nimport readline from \"readline\";\nimport events from \"events\";\nimport { InstructionSet, parseArchLine } from \"./bass\";\n/**\n * Opens a file and streams it out, line by line\n * @param path The path of the file to read\n * @param onLine A callback used to respond to each line content and its line number\n */\nexport const readByLines = async (",
"score": 0.835680365562439
}
] |
typescript
|
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const
|
matches = labelRegex.exec(line);
|
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8677788972854614
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 0.8514679670333862
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8489788770675659
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " type: \"immediate\",\n regex: cleanAndFinishInstructionRegex(instructionLine),\n immediate: {\n bitCount,\n stringIndex: index,\n stringLength: matchString.length,\n },\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8332411050796509
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " if (lineLabel) {\n output += `\\n${lineLabel.name}:\\n`;\n }\n output += ` ${buildDisassembledInstructionString(\n instruction,\n immediateLabel\n )}\\n`;\n address += 1;\n }\n return output;",
"score": 0.8280395269393921
}
] |
typescript
|
matches = labelRegex.exec(line);
|
import {
Tracer,
TraceFlags,
Span,
SpanKind,
SpanOptions,
Context,
context as api_context,
trace,
} from '@opentelemetry/api'
import { sanitizeAttributes } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'
import { SpanImpl } from './span.js'
import { getActiveConfig } from './config.js'
export class WorkerTracer implements Tracer {
private readonly _spanProcessor: SpanProcessor
private readonly resource: Resource
private readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()
constructor(spanProcessor: SpanProcessor, resource: Resource) {
this._spanProcessor = spanProcessor
this.resource = resource
}
get spanProcessor() {
return this._spanProcessor
}
addToResource(extra: Resource) {
this.resource.merge(extra)
}
startSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {
if (options.root) {
context = trace.deleteSpan(context)
}
const parentSpan = trace.getSpan(context)
const parentSpanContext = parentSpan?.spanContext()
const hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)
const traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()
const spanKind = options.kind || SpanKind.INTERNAL
const sanitisedAttrs = sanitizeAttributes(options.attributes)
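// Ask the configured head sampler whether this span should be recorded and sampled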
const sampler = getActiveConfig().sampling.headSampler
const samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])
const { decision, traceState, attributes: attrs } = samplingDecision
const attributes = Object.assign({}, sanitisedAttrs, attrs)
const spanId = this.idGenerator.generateSpanId()
const parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined
const traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE
const spanContext = { traceId, spanId, traceFlags, traceState }
|
const span = new SpanImpl({
|
attributes,
name,
onEnd: (span) => {
this.spanProcessor.onEnd(span as unknown as ReadableSpan)
},
resource: this.resource,
spanContext,
parentSpanId,
spanKind,
startTime: options.startTime,
})
// Do not get me started on the idiosyncrasies of the Otel JS libraries.
//@ts-ignore
this.spanProcessor.onStart(span, context)
return span
}
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, fn: F): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, options: SpanOptions, fn: F): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(
name: string,
options: SpanOptions,
context: Context,
fn: F
): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, ...args: unknown[]): ReturnType<F> {
const options = args.length > 1 ? (args[0] as SpanOptions) : undefined
const parentContext = args.length > 2 ? (args[1] as Context) : api_context.active()
const fn = args[args.length - 1] as F
const span = this.startSpan(name, options, parentContext)
const contextWithSpanSet = trace.setSpan(parentContext, span)
return api_context.with(contextWithSpanSet, fn, undefined, span)
}
}
|
src/tracer.ts
|
evanderkoogh-otel-cf-workers-18fd741
|
[
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 0.8594188690185547
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'\nimport { SemanticAttributes } from '@opentelemetry/semantic-conventions'\nimport { passthroughGet, unwrap, wrap } from '../wrap.js'\nimport {\n\tgetParentContextFromHeaders,\n\tgatherIncomingCfAttributes,\n\tgatherRequestAttributes,\n\tgatherResponseAttributes,\n\tinstrumentFetcher,\n} from './fetch.js'",
"score": 0.84649658203125
},
{
"filename": "src/span.ts",
"retrieved_chunk": "\tprivate _droppedEventsCount: number = 0\n\tprivate _droppedLinksCount: number = 0\n\tconstructor(init: SpanInit) {\n\t\tthis.name = init.name\n\t\tthis._spanContext = init.spanContext\n\t\tthis.parentSpanId = init.parentSpanId\n\t\tthis.kind = init.spanKind || SpanKind.INTERNAL\n\t\tthis.attributes = sanitizeAttributes(init.attributes)\n\t\tthis.startTime = getHrTime(init.startTime)\n\t\tthis.links = init.links || []",
"score": 0.8432044982910156
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "import {\n\ttrace,\n\tSpanOptions,\n\tSpanKind,\n\tpropagation,\n\tcontext as api_context,\n\tAttributes,\n\tException,\n\tContext,\n\tSpanStatusCode,",
"score": 0.8426540493965149
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t[SemanticAttributes.FAAS_COLDSTART]: cold_start,\n\t\t[SemanticAttributes.FAAS_EXECUTION]: request.headers.get('cf-ray') ?? undefined,\n\t}\n\tcold_start = false\n\tObject.assign(attributes, gatherRequestAttributes(request))\n\tObject.assign(attributes, gatherIncomingCfAttributes(request))\n\tconst options: SpanOptions = {\n\t\tattributes,\n\t\tkind: SpanKind.SERVER,\n\t}",
"score": 0.841772198677063
}
] |
typescript
|
const span = new SpanImpl({
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile,
|
(line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
|
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 0.9013264775276184
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "const archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst build = async () => {\n const instructionSet = await readArch(archPath);\n const sortedInstructions = instructionSet.instructions.sort(\n (a, b) => a.sortableOpcode - b.sortableOpcode\n );\n const buffer = readFileSync(inputFile);\n const outputString = parseBinaryBuffer(buffer, sortedInstructions);",
"score": 0.8728872537612915
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": "};\n/**\n * Reads and parses the BASS arch file\n * @param path The path of the arch file\n * @returns The InstructionSet resulting from parsing the arch file\n */\nexport const readArch = async (path: string): Promise<InstructionSet> => {\n const instructionSet: InstructionSet = {\n instructions: [],\n };",
"score": 0.8451828956604004
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "import { readFileSync, writeFileSync } from \"fs\";\nimport path from \"path\";\nimport { argv } from \"process\";\nimport { parseBinaryBuffer } from \"./lib/disassembly\";\nimport { readArch } from \"./lib/fs\";\nif (argv.length != 4) {\n console.log(`Received ${argv.length - 2} arguments. Expected 2\\n`);\n console.log(\"Usage: node disassembler.js [input.bin] [output.asm]\");\n process.exit(1);\n}",
"score": 0.8325183391571045
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": "import fs from \"fs\";\nimport readline from \"readline\";\nimport events from \"events\";\nimport { InstructionSet, parseArchLine } from \"./bass\";\n/**\n * Opens a file and streams it out, line by line\n * @param path The path of the file to read\n * @param onLine A callback used to respond to each line content and its line number\n */\nexport const readByLines = async (",
"score": 0.826634407043457
}
] |
typescript
|
(line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
|
const outputBuffer = outputInstructions(program, word16Align);
|
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 0.8787766098976135
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "const archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst build = async () => {\n const instructionSet = await readArch(archPath);\n const sortedInstructions = instructionSet.instructions.sort(\n (a, b) => a.sortableOpcode - b.sortableOpcode\n );\n const buffer = readFileSync(inputFile);\n const outputString = parseBinaryBuffer(buffer, sortedInstructions);",
"score": 0.8718410730361938
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 0.8614097237586975
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": "import { log } from \"./log\";\nimport { AssembledProgram, Option } from \"./types\";\nimport { maskOfSize } from \"./util\";\n/**\n * Builds the output buffer from the matched instructions\n * @param program The configured program we have built\n * @param word16Align If true, align the 12 bit opcodes to 16 bit words. The lowest nibble will be 0\n * @returns The output buffer that should be written to the assembled binary\n */\nexport const outputInstructions = (",
"score": 0.8397752642631531
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "import { readFileSync, writeFileSync } from \"fs\";\nimport path from \"path\";\nimport { argv } from \"process\";\nimport { parseBinaryBuffer } from \"./lib/disassembly\";\nimport { readArch } from \"./lib/fs\";\nif (argv.length != 4) {\n console.log(`Received ${argv.length - 2} arguments. Expected 2\\n`);\n console.log(\"Usage: node disassembler.js [input.bin] [output.asm]\");\n process.exit(1);\n}",
"score": 0.8357246518135071
}
] |
typescript
|
const outputBuffer = outputInstructions(program, word16Align);
|
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
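// One slot per possible 13-bit program-counter target; each entry collects the flow-control instructions that jump there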
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
// Build the list of instructions whose immediates will be replaced with these labels, and build the labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
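// Mnemonics whose immediate operand is a program-counter target and should therefore be rewritten as a label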
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
|
): instruction is ImmediateInstruction => {
|
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
|
src/lib/disassembly.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8263663649559021
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " instructions: Array<Instruction>;\n}\nexport type Instruction = ImmediateInstruction | LiteralInstruction;\nexport interface InstructionBase {\n regex: RegExp;\n opcodeString: string;\n sortableOpcode: number;\n originalInstruction: string;\n}\nexport type ImmediateInstruction = InstructionBase & {",
"score": 0.8144048452377319
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 0.8135846853256226
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " });\n } else if (matches[2] !== undefined) {\n // potential label\n if (instruction.type !== \"immediate\") {\n log(\n \"Attempted to match content with non-immediate instruction\",\n lineNumber\n );\n return;\n }",
"score": 0.8097872734069824
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " const matches = instruction.regex.exec(line);\n const address = program.currentAddress;\n if (!!matches && matches.length > 0) {\n if (matches[1] !== undefined) {\n // immediate\n if (instruction.type !== \"immediate\") {\n log(\n \"Attempted to match content with non-immediate instruction\",\n lineNumber\n );",
"score": 0.8048335313796997
}
] |
typescript
|
): instruction is ImmediateInstruction => {
|
import {
bassNumberRegex,
instructionPrefixRegex,
instructionSuffixRegex,
whitespaceRegex,
} from "./regex";
import { log } from "./log";
import { parseNumber } from "./util";
import { buildOpcode } from "./opcodeOutput";
export interface InstructionSet {
instructions: Array<Instruction>;
}
export type Instruction = ImmediateInstruction | LiteralInstruction;
export interface InstructionBase {
regex: RegExp;
opcodeString: string;
sortableOpcode: number;
originalInstruction: string;
}
export type ImmediateInstruction = InstructionBase & {
type: "immediate";
immediate: {
bitCount: number;
/**
* The index in the originalInstruction the immediate occurs
*/
stringIndex: number;
/**
* The length of the immediate in the originalInstruction string
*/
stringLength: number;
};
};
export type LiteralInstruction = InstructionBase & {
type: "literal";
};
/**
* Parses a single line of a BASS architecture file
* @param line The line being parsed
* @param lineNumber The one-based index of the line being processed
* @param config The global instruction set config
* @returns Nothing; the parsed instruction is pushed onto config.instructions
*/
export const parseArchLine = (
line: string,
lineNumber: number,
config: InstructionSet
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith("#")) {
// Comment. Skip
return;
}
const sections = line.split(";");
if (sections.length != 2) {
log(
"Unexpected semicolon. Does this instruction have an output?",
lineNumber
);
return;
}
const [originalInstruction, opcode] = sections;
if (!originalInstruction || !opcode) {
log("Unknown input", lineNumber);
return;
}
const opcodeString = opcode.trim();
let numberMatch = originalInstruction.match(bassNumberRegex);
if (!!numberMatch && numberMatch.index) {
// This instruction contains a star followed by a number
// This is an immediate
const matchString = numberMatch[0];
// This is guaranteed to exist due to the regex
|
const bitCount = parseNumber(numberMatch[1]!);
|
const index = numberMatch.index;
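// Swap the numeric placeholder in the template for a capture group that matches either a literal number or a label name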
const instructionLine =
originalInstruction.substring(0, index) +
"(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))" +
originalInstruction.substring(index + matchString.length);
const sortableOpcode = buildSortableOpcode(opcodeString, bitCount);
config.instructions.push({
type: "immediate",
regex: cleanAndFinishInstructionRegex(instructionLine),
immediate: {
bitCount,
stringIndex: index,
stringLength: matchString.length,
},
opcodeString,
sortableOpcode,
originalInstruction: originalInstruction.trim(),
});
} else {
// This is a literal
const sortableOpcode = buildSortableOpcode(opcodeString, 0);
config.instructions.push({
type: "literal",
regex: cleanAndFinishInstructionRegex(originalInstruction),
opcodeString,
sortableOpcode,
originalInstruction: originalInstruction.trim(),
});
}
};
const buildSortableOpcode = (template: string, bitCount: number) =>
buildOpcode(template, bitCount, 0);
const cleanAndFinishInstructionRegex = (instruction: string): RegExp => {
const cleaned = instruction
.trim()
.replace(whitespaceRegex, whitespaceRegex.source);
// Force nothing but whitespace from beginning of string to instruction
// Force nothing but whitespace and a comment from instruction to end of string
return new RegExp(
instructionPrefixRegex.source + cleaned + instructionSuffixRegex.source
);
};
|
src/lib/bass.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/assembler.ts",
"retrieved_chunk": " return;\n }\n program.matchedInstructions.push({\n type: \"immediate\",\n line,\n immediate: parseNumber(matches[1]),\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,",
"score": 0.8861913681030273
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " let opcode = 0;\n switch (instruction.type) {\n case \"literal\": {\n opcode = buildOpcode(instruction.opcodeString, 0, 0);\n break;\n }\n case \"immediate\": {\n opcode = buildOpcode(\n instruction.opcodeString,\n instruction.bitCount,",
"score": 0.8755148649215698
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " opcode = buildOpcode(\n instruction.opcodeString,\n instruction.bitCount,\n label.address\n );\n break;\n }\n case \"constant\": {\n if (instruction.subtype === \"literal\") {\n opcode = instruction.value;",
"score": 0.8545222282409668
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 0.850201427936554
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " program.matchedInstructions.push({\n type: \"label\",\n line,\n label: matches[2],\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,\n });\n } else {",
"score": 0.8472803831100464
}
] |
typescript
|
const bitCount = parseNumber(numberMatch[1]!);
|
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
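// One slot per possible 13-bit program-counter target; each entry collects the flow-control instructions that jump there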
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
// Build the list of instructions whose immediates will be replaced with these labels, and build the labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output +=
|
` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
|
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
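// Mnemonics whose immediate operand is a program-counter target and should therefore be rewritten as a label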
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
|
src/lib/disassembly.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 0.8644866943359375
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8622563481330872
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8431777954101562
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " program.matchedInstructions.push({\n type: \"label\",\n line,\n label: matches[2],\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,\n });\n } else {",
"score": 0.8425858020782471
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " lineNumber,\n address,\n });\n } else if (label !== undefined) {\n program.matchedInstructions.push({\n type: \"constant\",\n subtype: \"label\",\n label,\n line,\n lineNumber,",
"score": 0.8417941331863403
}
] |
typescript
|
` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
|
import { DisassembledInstruction } from "./types";
import { isLetterChar, maskOfSize } from "./util";
export const buildDisassembledInstructionString = (
{ instruction, actualWord, address }: DisassembledInstruction,
immediateLabel: string | undefined
) => {
let instructionString = instruction.originalInstruction;
if (instruction.type === "immediate") {
const { bitCount, stringIndex, stringLength } = instruction.immediate;
const immediatePrefix = instructionString.substring(0, stringIndex);
const immediateSuffix = instructionString.substring(
stringIndex + stringLength
);
let immediate = "";
if (immediateLabel) {
immediate = immediateLabel;
} else {
const argument = maskOfSize(bitCount) & actualWord;
if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {
// If letter, treat as decimal
immediate = argument.toString();
} else {
// Otherwise, treat as hex
immediate = `0x${argument.toString(16).toUpperCase()}`;
}
}
instructionString = `${immediatePrefix}${immediate}${immediateSuffix}`;
}
// Separate out instruction so that it formats nicely
// Four total columns
// Opcode - Source - Dest - Comments
const splitInstruction = instructionString.split(/\s+/);
let lastPadWidth = 0;
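// The final column's padding covers its own column plus any unused trailing columns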
for (let i = 2; i >= splitInstruction.length - 1; i--) {
lastPadWidth += columnPadWidth(i);
}
const formattedInstructionString = splitInstruction
.
|
map((s, i) => {
|
const pad =
i === splitInstruction.length - 1 ? lastPadWidth : columnPadWidth(i);
return s.padEnd(pad);
})
.join("");
const comment = `// 0x${address
.toString(16)
.toUpperCase()
.padEnd(4)} (0x${actualWord.toString(16).toUpperCase()})`;
return `${formattedInstructionString.padEnd(81)}${comment}`;
};
const columnPadWidth = (column: number) => {
switch (column) {
case 0:
return 6;
case 1:
return 5;
case 2:
return 10;
}
return 0;
};
|
src/lib/display.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n } else {\n // This is a literal\n const sortableOpcode = buildSortableOpcode(opcodeString, 0);\n config.instructions.push({\n type: \"literal\",\n regex: cleanAndFinishInstructionRegex(originalInstruction),\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8288834691047668
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " const matchString = numberMatch[0];\n // This is guaranteed to exist due to the regex\n const bitCount = parseNumber(numberMatch[1]!);\n const index = numberMatch.index;\n const instructionLine =\n originalInstruction.substring(0, index) +\n \"(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))\" +\n originalInstruction.substring(index + matchString.length);\n const sortableOpcode = buildSortableOpcode(opcodeString, bitCount);\n config.instructions.push({",
"score": 0.8280161023139954
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " type: \"immediate\",\n regex: cleanAndFinishInstructionRegex(instructionLine),\n immediate: {\n bitCount,\n stringIndex: index,\n stringLength: matchString.length,\n },\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8210120797157288
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n }\n};\nconst buildSortableOpcode = (template: string, bitCount: number) =>\n buildOpcode(template, bitCount, 0);\nconst cleanAndFinishInstructionRegex = (instruction: string): RegExp => {\n const cleaned = instruction\n .trim()\n .replace(whitespaceRegex, whitespaceRegex.source);\n // Force nothing but whitespace from beginning of string to instruction",
"score": 0.812080979347229
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " // Force nothing but whitespace and a comment from instruction to end of string\n return new RegExp(\n instructionPrefixRegex.source + cleaned + instructionSuffixRegex.source\n );\n};",
"score": 0.8069062232971191
}
] |
typescript
|
map((s, i) => {
|
import { DisassembledInstruction } from "./types";
import { isLetterChar, maskOfSize } from "./util";
export const buildDisassembledInstructionString = (
{ instruction, actualWord, address }: DisassembledInstruction,
immediateLabel: string | undefined
) => {
let instructionString = instruction.originalInstruction;
if (instruction.type === "immediate") {
const { bitCount, stringIndex, stringLength } = instruction.immediate;
const immediatePrefix = instructionString.substring(0, stringIndex);
const immediateSuffix = instructionString.substring(
stringIndex + stringLength
);
let immediate = "";
if (immediateLabel) {
immediate = immediateLabel;
} else {
const argument = maskOfSize(bitCount) & actualWord;
if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {
// If letter, treat as decimal
immediate = argument.toString();
} else {
// Otherwise, treat as hex
immediate = `0x${argument.toString(16).toUpperCase()}`;
}
}
instructionString = `${immediatePrefix}${immediate}${immediateSuffix}`;
}
// Separate out instruction so that it formats nicely
// Four total columns
// Opcode - Source - Dest - Comments
const splitInstruction = instructionString.split(/\s+/);
let lastPadWidth = 0;
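// The final column's padding covers its own column plus any unused trailing columns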
for (let i = 2; i >= splitInstruction.length - 1; i--) {
lastPadWidth += columnPadWidth(i);
}
const formattedInstructionString = splitInstruction
.map
|
((s, i) => {
|
const pad =
i === splitInstruction.length - 1 ? lastPadWidth : columnPadWidth(i);
return s.padEnd(pad);
})
.join("");
const comment = `// 0x${address
.toString(16)
.toUpperCase()
.padEnd(4)} (0x${actualWord.toString(16).toUpperCase()})`;
return `${formattedInstructionString.padEnd(81)}${comment}`;
};
const columnPadWidth = (column: number) => {
switch (column) {
case 0:
return 6;
case 1:
return 5;
case 2:
return 10;
}
return 0;
};
|
src/lib/display.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n } else {\n // This is a literal\n const sortableOpcode = buildSortableOpcode(opcodeString, 0);\n config.instructions.push({\n type: \"literal\",\n regex: cleanAndFinishInstructionRegex(originalInstruction),\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8278827667236328
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " const matchString = numberMatch[0];\n // This is guaranteed to exist due to the regex\n const bitCount = parseNumber(numberMatch[1]!);\n const index = numberMatch.index;\n const instructionLine =\n originalInstruction.substring(0, index) +\n \"(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))\" +\n originalInstruction.substring(index + matchString.length);\n const sortableOpcode = buildSortableOpcode(opcodeString, bitCount);\n config.instructions.push({",
"score": 0.826572060585022
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " type: \"immediate\",\n regex: cleanAndFinishInstructionRegex(instructionLine),\n immediate: {\n bitCount,\n stringIndex: index,\n stringLength: matchString.length,\n },\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8194513320922852
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n }\n};\nconst buildSortableOpcode = (template: string, bitCount: number) =>\n buildOpcode(template, bitCount, 0);\nconst cleanAndFinishInstructionRegex = (instruction: string): RegExp => {\n const cleaned = instruction\n .trim()\n .replace(whitespaceRegex, whitespaceRegex.source);\n // Force nothing but whitespace from beginning of string to instruction",
"score": 0.8114023208618164
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " // Force nothing but whitespace and a comment from instruction to end of string\n return new RegExp(\n instructionPrefixRegex.source + cleaned + instructionSuffixRegex.source\n );\n};",
"score": 0.8081609010696411
}
] |
typescript
|
((s, i) => {
|
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
* @param variable Name of memory variable used to store the histories `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
|
const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) };
|
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
if (lines.length === 0 && this.required) {
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
}
|
src/ConversationHistory.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 0.8420411348342896
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.8386411666870117
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 0.8341180086135864
},
{
"filename": "src/Prompt.spec.ts",
"retrieved_chunk": " ]);\n const rendered = await prompt.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [\n { role: \"user\", content: \"Hello\" },\n { role: \"user\", content: \"There Big\" },\n { role: \"user\", content: \"World\" }\n ]);\n assert.equal(rendered.length, 4);\n assert.equal(rendered.tooLong, false);\n });",
"score": 0.8336396217346191
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should truncate its output to match available budget\", async () => {\n const section = new ConversationHistory('history', 1);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered.output, [\n { role: \"assistant\", content: \"Hi\" },\n ]);\n assert.equal(rendered.length, 1);",
"score": 0.8320354223251343
}
] |
typescript
|
const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) };
|
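For orientation, a minimal usage sketch of the ConversationHistory section shown in the record above, assuming the local promptrix modules (VolatileMemory, FunctionRegistry, GPT3Tokenizer) behave as they do in the spec files quoted in the context chunks; a hedged illustration, not an authoritative example.
// Usage sketch only; assumes the local promptrix modules shown in this dataset.
import { ConversationHistory } from "./ConversationHistory";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
async function renderHistoryExample() {
    // Memory variable `history` holds the Message[] the section will render
    const memory = new VolatileMemory({
        history: [
            { role: "user", content: "Hello" },
            { role: "assistant", content: "Hi" },
        ],
    });
    const functions = new FunctionRegistry();
    const tokenizer = new GPT3Tokenizer();
    // Fixed budget of 100 tokens; newest messages are kept while they fit
    const section = new ConversationHistory("history", 100);
    const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
    console.log(rendered.output);  // [{ role: "user", content: "Hello" }, { role: "assistant", content: "Hi" }]
    console.log(rendered.length);  // token count of the rendered messages
    console.log(rendered.tooLong); // false while length <= maxTokens
}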
import { Message, PromptFunctions, PromptMemory, PromptSection, RenderedPromptSection, Tokenizer } from "./types";
/**
* Abstract Base class for most prompt sections.
*/
export abstract class PromptSectionBase implements PromptSection {
public readonly required: boolean;
public readonly tokens: number;
public readonly separator: string;
public readonly textPrefix: string;
/**
* Creates a new 'PromptSectionBase' instance.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix: string = '') {
this.required = required;
this.tokens = tokens;
this.separator = separator;
this.textPrefix = textPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Render as messages
const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);
// Convert to text
let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);
// Calculate length
const prefixLength = tokenizer.encode(this.textPrefix).length;
const separatorLength = tokenizer.encode(this.separator).length;
let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);
// Truncate if fixed length
text = this.textPrefix + text;
if (this.tokens > 1.0 && length > this.tokens) {
const encoded = tokenizer.encode(text);
text = tokenizer.decode(encoded.slice(0, this.tokens));
length = this.tokens;
}
return { output: text, length: length, tooLong: length > maxTokens };
}
public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;
protected returnMessages(
|
output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {
|
// Truncate if fixed length
if (this.tokens > 1.0) {
while (length > this.tokens) {
const msg = output.pop();
const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));
length -= encoded.length;
if (length < this.tokens) {
const delta = this.tokens - length;
const truncated = tokenizer.decode(encoded.slice(0, delta));
output.push({ role: msg!.role, content: truncated });
length += delta;
}
}
}
return { output: output, length: length, tooLong: length > maxTokens };
}
public static getMessageText(message: Message): string {
let text = message.content ?? '';
if (message.function_call) {
text = JSON.stringify(message.function_call);
} else if (message.name) {
text = `${message.name} returned ${text}`;
}
return text;
}
}
|
src/PromptSectionBase.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.9370863437652588
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return { output: text, length: tokenizer.encode(text).length, tooLong: remaining < 0 };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Start a new layout\n // - Adds all sections from the current LayoutEngine hierarchy to a flat array\n const layout: PromptSectionLayout<Message[]>[] = [];\n this.addSectionsToLayout(this.sections, layout);\n // Layout sections\n const remaining = await this.layoutSections(\n layout,",
"score": 0.9342752695083618
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " lines.unshift(line);\n }\n return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Get messages from memory\n const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];\n // Populate messages and stay under the token budget\n let tokens = 0;\n const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;",
"score": 0.920932948589325
},
{
"filename": "src/FunctionCallMessage.ts",
"retrieved_chunk": " this.function_call = function_call;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._length = tokenizer.encode(JSON.stringify(this.function_call)).length;\n }\n // Return output\n return this.returnMessages([{ role: 'assistant', content: null, function_call: this.function_call }], this._length, tokenizer, maxTokens);\n }",
"score": 0.9168907999992371
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 0.9148184061050415
}
] |
typescript
|
output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {
|
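The PromptSectionBase record above captures the contract every section follows: implement renderAsMessages and hand the result to returnMessages, which applies fixed-size truncation. A minimal, hypothetical subclass in the style of the TestSection used by the spec records:
// Hypothetical subclass sketch; GreetingSection is not part of the promptrix sources.
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
export class GreetingSection extends PromptSectionBase {
    public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
        const content = "Hello Big World";
        const length = tokenizer.encode(content).length;
        // returnMessages truncates the message when a fixed token size (tokens > 1.0) was configured
        return this.returnMessages([{ role: "user", content }], length, tokenizer, maxTokens);
    }
}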
import { strict as assert } from "assert";
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
export class TestSection extends PromptSectionBase {
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
return this.returnMessages([{ role: 'test', content: 'Hello Big World' }], 3, tokenizer, maxTokens);
}
}
export class MultiTestSection extends PromptSectionBase {
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
return this.returnMessages([{ role: 'test', content: 'Hello Big' },{ role: 'test', content: 'World' }], 3, tokenizer, maxTokens);
}
}
describe("PromptSectionBase", () => {
const memory = new VolatileMemory();
const functions = new FunctionRegistry();
const tokenizer = new GPT3Tokenizer();
describe("constructor", () => {
it("should create a TestSection", () => {
const section = new TestSection();
assert.equal(section.tokens, -1);
assert.equal(section.required, true);
assert.equal(section.separator, "\n");
assert.equal(section.textPrefix, "");
});
});
describe("renderAsMessages", () => {
it("should render a TestSection to an array of messages", async () => {
const section = new TestSection();
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big World" }]);
assert.equal(rendered.length, 3);
assert.equal(rendered.tooLong, false);
});
it("should truncate a fixed length TestSection", async () => {
|
const section = new TestSection(2);
|
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
it("should identify a fixed length TestSection as being tooLong", async () => {
const section = new TestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, true);
});
it("should drop messages to truncate a fixed length MultiTestSection", async () => {
const section = new MultiTestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
});
describe("renderAsText", () => {
it("should render a TestSection to a string", async () => {
const section = new TestSection();
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "Hello Big World");
assert.equal(rendered.length, 3);
assert.equal(rendered.tooLong, false);
});
it("should truncate a fixed length TestSection", async () => {
const section = new TestSection(4, true, "\n", "user: ");
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "user: Hello Big");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, false);
});
it("should identify a fixed length TestSection as being tooLong", async () => {
const section = new TestSection(4, true, "\n", "user: ");
const rendered = await section.renderAsText(memory, functions, tokenizer, 1);
assert.equal(rendered.output, "user: Hello Big");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, true);
});
});
});
|
src/PromptSectionBase.spec.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/GroupSection.spec.ts",
"retrieved_chunk": " });\n describe(\"renderAsMessages\", () => {\n it(\"should render a TextSection to an array of messages\", async () => {\n const section = new GroupSection([\n new TextSection(\"Hello World\", \"user\")\n ]);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"system\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);",
"score": 0.9616420269012451
},
{
"filename": "src/TemplateSection.spec.ts",
"retrieved_chunk": " assert.equal(section.separator, \"\\n\");\n });\n });\n describe(\"renderAsMessages\", () => {\n it(\"should render a TemplateSection to an array of messages\", async () => {\n const section = new TemplateSection(\"Hello World\", \"user\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);",
"score": 0.9588983058929443
},
{
"filename": "src/SystemMessage.spec.ts",
"retrieved_chunk": " it(\"should render a SystemMessage to an array of messages\", async () => {\n const section = new SystemMessage(\"Hello World\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"system\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n });\n describe(\"renderAsText\", () => {\n it(\"should render a TemplateSection to a string\", async () => {",
"score": 0.9501069188117981
},
{
"filename": "src/UserMessage.spec.ts",
"retrieved_chunk": " describe(\"renderAsMessages\", () => {\n it(\"should render a UserMessage to an array of messages\", async () => {\n const section = new UserMessage(\"Hello World\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n });\n describe(\"renderAsText\", () => {",
"score": 0.9498199224472046
},
{
"filename": "src/TextSection.spec.ts",
"retrieved_chunk": " const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should identify a output as being too long\", async () => {\n const section = new TextSection(\"Hello World\", \"user\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);",
"score": 0.9490242004394531
}
] |
typescript
|
const section = new TestSection(2);
|
import { strict as assert } from "assert";
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
export class TestSection extends PromptSectionBase {
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
return this.returnMessages([{ role: 'test', content: 'Hello Big World' }], 3, tokenizer, maxTokens);
}
}
export class MultiTestSection extends PromptSectionBase {
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
return this.returnMessages([{ role: 'test', content: 'Hello Big' },{ role: 'test', content: 'World' }], 3, tokenizer, maxTokens);
}
}
describe("PromptSectionBase", () => {
const memory = new VolatileMemory();
const functions = new FunctionRegistry();
const tokenizer = new GPT3Tokenizer();
describe("constructor", () => {
it("should create a TestSection", () => {
const section = new TestSection();
assert.equal(section.tokens, -1);
assert.equal(section.required, true);
assert.equal(section.separator, "\n");
assert.equal(section.textPrefix, "");
});
});
describe("renderAsMessages", () => {
it("should render a TestSection to an array of messages", async () => {
const section = new TestSection();
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big World" }]);
assert.equal(rendered.length, 3);
assert.equal(rendered.tooLong, false);
});
it("should truncate a fixed length TestSection", async () => {
const section = new TestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
it("should identify a fixed length TestSection as being tooLong", async () => {
const section = new TestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, true);
});
it("should drop messages to truncate a fixed length MultiTestSection", async () => {
const section = new MultiTestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
});
describe("renderAsText", () => {
it("should render a TestSection to a string", async () => {
const section = new TestSection();
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "Hello Big World");
assert.equal(rendered.length, 3);
assert.equal(rendered.tooLong, false);
});
it("should truncate a fixed length TestSection", async () => {
|
const section = new TestSection(4, true, "\n", "user: ");
|
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "user: Hello Big");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, false);
});
it("should identify a fixed length TestSection as being tooLong", async () => {
const section = new TestSection(4, true, "\n", "user: ");
const rendered = await section.renderAsText(memory, functions, tokenizer, 1);
assert.equal(rendered.output, "user: Hello Big");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, true);
});
});
});
|
src/PromptSectionBase.spec.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/TextSection.spec.ts",
"retrieved_chunk": " describe(\"renderAsText\", () => {\n it(\"should render a TextSection to a string\", async () => {\n const section = new TextSection(\"Hello World\", \"user\");\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello World\");\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should identify a text output as being too long\", async () => {\n const section = new TextSection(\"Hello World\", \"user\");",
"score": 0.9815930128097534
},
{
"filename": "src/TemplateSection.spec.ts",
"retrieved_chunk": " it(\"should render a TemplateSection to a string\", async () => {\n const section = new TemplateSection(\"Hello World\", \"user\");\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello World\");\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should identify a text output as being too long\", async () => {\n const section = new TemplateSection(\"Hello World\", \"user\");\n const rendered = await section.renderAsText(memory, functions, tokenizer, 1);",
"score": 0.963959813117981
},
{
"filename": "src/GroupSection.spec.ts",
"retrieved_chunk": " assert.equal(rendered.tooLong, false);\n });\n });\n describe(\"renderAsText\", () => {\n it(\"should render a TextSection to a string\", async () => {\n const section = new GroupSection([\n new TextSection(\"Hello World\", \"user\")\n ]);\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello World\");",
"score": 0.9615212082862854
},
{
"filename": "src/TextSection.spec.ts",
"retrieved_chunk": " const rendered = await section.renderAsText(memory, functions, tokenizer, 1);\n assert.equal(rendered.output, \"Hello World\");\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, true);\n });\n it(\"should support multiple text render calls\", async () => {\n const section = new TextSection(\"Hello World\", \"user\");\n const rendered1 = await section.renderAsText(memory, functions, tokenizer, 1);\n assert.equal(rendered1.output, \"Hello World\");\n const rendered2 = await section.renderAsText(memory, functions, tokenizer, 1);",
"score": 0.9499033093452454
},
{
"filename": "src/Prompt.spec.ts",
"retrieved_chunk": " });\n });\n describe(\"renderAsText\", () => {\n it(\"should render a TextSection to a string\", async () => {\n const prompt = new Prompt([\n new TextSection(\"Hello World\", \"user\")\n ]);\n const rendered = await prompt.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello World\");\n assert.equal(rendered.length, 2);",
"score": 0.9438693523406982
}
] |
typescript
|
const section = new TestSection(4, true, "\n", "user: ");
|
import { strict as assert } from "assert";
import { FunctionRegistry } from "./FunctionRegistry";
import { VolatileMemory } from "./VolatileMemory";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
describe("FunctionRegistry", () => {
describe("constructor", () => {
it("should create a FunctionRegistry", () => {
const registry = new FunctionRegistry();
assert.notEqual(registry, null);
assert.equal(registry.has("test"), false);
});
it("should create a FunctionRegistry with initial functions", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.notEqual(registry, null);
assert.equal(registry.has("test"), true);
});
});
describe("addFunction", () => {
it("should add a function", () => {
const registry = new FunctionRegistry();
registry.addFunction("test", async (memory, functions, tokenizer, args) => { });
assert.equal(registry.has("test"), true);
});
it("should throw when adding a function that already exists", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.throws(() => registry.addFunction("test", async (memory, functions, tokenizer, args) => { }));
});
});
describe("get", () => {
it("should get a function", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
const fn = registry.get("test");
assert.notEqual(fn, null);
});
it("should throw when getting a function that doesn't exist", () => {
const registry = new FunctionRegistry();
assert.throws(() => registry.get("test"));
});
});
describe("has", () => {
it("should return false when a function doesn't exist", () => {
const registry = new FunctionRegistry();
assert.equal(registry.has("test"), false);
});
it("should return true when a function exists", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.equal(registry.has("test"), true);
});
});
describe("invoke", () => {
const memory = new VolatileMemory();
const tokenizer = new GPT3Tokenizer();
it("should invoke a function", async () => {
let called = false;
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => {
assert.equal(args.length, 1);
assert.equal(args[0], "Hello World");
called = true;
}
});
await registry
|
.invoke("test", memory, registry, tokenizer, ["Hello World"]);
|
assert.equal(called, true);
});
it("should throw when invoking a function that doesn't exist", () => {
const registry = new FunctionRegistry();
assert.throws(() => registry.invoke("test", memory, registry, tokenizer, ["Hello World"]));
});
});
});
|
src/FunctionRegistry.spec.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " ],\n \"longHistory\": [\n { role: \"user\", content: \"Hello\" },\n { role: \"assistant\", content: \"Hi! How can I help you?\" },\n { role: \"user\", content: \"I'd like to book a flight\" },\n { role: \"assistant\", content: \"Sure, where would you like to go?\" },\n ]\n });\n const functions = new FunctionRegistry();\n const tokenizer = new GPT3Tokenizer();",
"score": 0.8454333543777466
},
{
"filename": "src/SystemMessage.spec.ts",
"retrieved_chunk": " const section = new SystemMessage(\"Hello World\");\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello World\");\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n });\n});",
"score": 0.8380029201507568
},
{
"filename": "src/Prompt.spec.ts",
"retrieved_chunk": " ]);\n const rendered = await prompt.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [\n { role: \"user\", content: \"Hello\" },\n { role: \"user\", content: \"There Big\" },\n { role: \"user\", content: \"World\" }\n ]);\n assert.equal(rendered.length, 4);\n assert.equal(rendered.tooLong, false);\n });",
"score": 0.8355768322944641
},
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " }\n this._functions.set(name, value);\n }\n public invoke(key: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any> {\n const fn = this.get(key);\n return fn(memory, functions, tokenizer, args);\n }\n}",
"score": 0.8334032893180847
},
{
"filename": "src/TemplateSection.spec.ts",
"retrieved_chunk": " it(\"should render a template with a {{function}} and arguments\", async () => {\n const section = new TemplateSection(\"Hello {{test2 World}}\", \"user\");\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello World\");\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should render a template with a {{function}} and quoted arguments\", async () => {\n const section = new TemplateSection(\"Hello {{test2 'Big World'}}\", \"user\");\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);",
"score": 0.8295104503631592
}
] |
typescript
|
.invoke("test", memory, registry, tokenizer, ["Hello World"]);
|
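The FunctionRegistry spec above exercises the registry's constructor, addFunction, get, has, and invoke. A standalone sketch of the same surface follows; the function name `echo` is hypothetical.
// Sketch of registering and invoking a prompt function; the name `echo` is hypothetical.
import { FunctionRegistry } from "./FunctionRegistry";
import { VolatileMemory } from "./VolatileMemory";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
async function invokeExample() {
    const registry = new FunctionRegistry();
    registry.addFunction("echo", async (memory, functions, tokenizer, args) => args.join(" "));
    const memory = new VolatileMemory();
    const tokenizer = new GPT3Tokenizer();
    // invoke(name, memory, functions, tokenizer, args) resolves with whatever the function returns
    const result = await registry.invoke("echo", memory, registry, tokenizer, ["Hello", "World"]);
    console.log(result); // "Hello World"
}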
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
* @param variable Name of memory variable used to store the histories `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
const message: Message = { role: msg.role, content: Utilities
|
.toString(tokenizer, msg.content) };
|
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
if (lines.length === 0 && this.required) {
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
}
|
src/ConversationHistory.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 0.8467205762863159
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should truncate its output to match available budget\", async () => {\n const section = new ConversationHistory('history', 1);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered.output, [\n { role: \"assistant\", content: \"Hi\" },\n ]);\n assert.equal(rendered.length, 1);",
"score": 0.8451521396636963
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.837876558303833
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 0.8366134762763977
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " const section = new ConversationHistory('longHistory', 100);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);\n assert.deepEqual(rendered.output, []);\n assert.equal(rendered.length, 0);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should always render the last message when section is required\", async () => {\n const section = new ConversationHistory('longHistory', 100, true);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);\n assert.deepEqual(rendered.output, [",
"score": 0.8362060785293579
}
] |
typescript
|
.toString(tokenizer, msg.content) };
|
import { strict as assert } from "assert";
import { ConversationHistory } from "./ConversationHistory";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
describe("ConversationHistory", () => {
const memory = new VolatileMemory({
"history": [
{ role: "user", content: "Hello" },
{ role: "assistant", content: "Hi" },
],
"longHistory": [
{ role: "user", content: "Hello" },
{ role: "assistant", content: "Hi! How can I help you?" },
{ role: "user", content: "I'd like to book a flight" },
{ role: "assistant", content: "Sure, where would you like to go?" },
]
});
const functions = new FunctionRegistry();
const tokenizer = new GPT3Tokenizer();
describe("constructor", () => {
it("should create a ConversationHistory", () => {
const section = new ConversationHistory('history');
assert.equal(section.variable, 'history');
assert.equal(section.tokens, 1.0);
assert.equal(section.required, false);
assert.equal(section.separator, "\n");
assert.equal(section.userPrefix, "user: ");
assert.equal(section.assistantPrefix, "assistant: ");
assert.equal(section.textPrefix, "");
});
});
describe("renderAsMessages", () => {
it("should render a ConversationHistory to an array of messages", async () => {
const section = new ConversationHistory('history', 100);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [
{ role: "user", content: "Hello" },
{ role: "assistant", content: "Hi" },
]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
it("should truncate its output to match available budget", async () => {
const section = new ConversationHistory('history', 1);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);
assert.deepEqual(rendered.output, [
{ role: "assistant", content: "Hi" },
]);
assert.equal(rendered.length, 1);
assert.equal(rendered.tooLong, false);
});
it("should render nothing when there's no history", async () => {
const section = new ConversationHistory('nohistory', 100);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, []);
assert.equal(rendered.length, 0);
assert.equal(rendered.tooLong, false);
});
it("should render nothing for a long last message", async () => {
const section = new ConversationHistory('longHistory', 100);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);
assert.deepEqual(rendered.output, []);
assert.equal(rendered.length, 0);
assert.equal(rendered.tooLong, false);
});
it("should always render the last message when section is required", async () => {
const section = new
|
ConversationHistory('longHistory', 100, true);
|
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);
assert.deepEqual(rendered.output, [
{ role: "assistant", content: "Sure, where would you like to go?" },
]);
assert.equal(rendered.length, 9);
assert.equal(rendered.tooLong, true);
});
});
describe("renderAsText", () => {
it("should render a ConversationHistory to a string", async () => {
const section = new ConversationHistory('history', 100);
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "user: Hello\nassistant: Hi");
assert.equal(rendered.length, 8);
assert.equal(rendered.tooLong, false);
});
it("should truncate its output to match available budget", async () => {
const section = new ConversationHistory('history', 1);
const rendered = await section.renderAsText(memory, functions, tokenizer, 4);
assert.equal(rendered.output, "assistant: Hi");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, false);
});
it("should render nothing when there's no history", async () => {
const section = new ConversationHistory('nohistory', 100);
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "");
assert.equal(rendered.length, 0);
assert.equal(rendered.tooLong, false);
});
it("should render nothing for a long last message", async () => {
const section = new ConversationHistory('longHistory', 100);
const rendered = await section.renderAsText(memory, functions, tokenizer, 2);
assert.equal(rendered.output, "");
assert.equal(rendered.length, 0);
assert.equal(rendered.tooLong, false);
});
it("should always render the last message when section is required", async () => {
const section = new ConversationHistory('longHistory', 100, true);
const rendered = await section.renderAsText(memory, functions, tokenizer, 2);
assert.equal(rendered.output, "assistant: Sure, where would you like to go?");
assert.equal(rendered.length, 12);
assert.equal(rendered.tooLong, true);
});
});
});
|
src/ConversationHistory.spec.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/PromptSectionBase.spec.ts",
"retrieved_chunk": " it(\"should render a TestSection to an array of messages\", async () => {\n const section = new TestSection();\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"test\", content: \"Hello Big World\" }]);\n assert.equal(rendered.length, 3);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should truncate a fixed length TestSection\", async () => {\n const section = new TestSection(2);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);",
"score": 0.9128158688545227
},
{
"filename": "src/TextSection.spec.ts",
"retrieved_chunk": " const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should identify a output as being too long\", async () => {\n const section = new TextSection(\"Hello World\", \"user\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);",
"score": 0.9118609428405762
},
{
"filename": "src/TextSection.spec.ts",
"retrieved_chunk": " assert.equal(rendered.tooLong, true);\n });\n it(\"should support multiple message render calls\", async () => {\n const section = new TextSection(\"Hello World\", \"user\");\n const rendered1 = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered1.output, [{ role: \"user\", content: \"Hello World\" }]);\n const rendered2 = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered2.output, [{ role: \"user\", content: \"Hello World\" }]);\n });\n });",
"score": 0.9088611602783203
},
{
"filename": "src/PromptSectionBase.spec.ts",
"retrieved_chunk": " assert.deepEqual(rendered.output, [{ role: \"test\", content: \"Hello Big\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should identify a fixed length TestSection as being tooLong\", async () => {\n const section = new TestSection(2);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered.output, [{ role: \"test\", content: \"Hello Big\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, true);",
"score": 0.9009230136871338
},
{
"filename": "src/PromptSectionBase.spec.ts",
"retrieved_chunk": " });\n it(\"should drop messages to truncate a fixed length MultiTestSection\", async () => {\n const section = new MultiTestSection(2);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"test\", content: \"Hello Big\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n });\n describe(\"renderAsText\", () => {",
"score": 0.8996451497077942
}
] |
typescript
|
ConversationHistory('longHistory', 100, true);
|
import { Message, PromptFunctions, PromptMemory, PromptSection, RenderedPromptSection, Tokenizer } from "./types";
/**
* Abstract Base class for most prompt sections.
*/
export abstract class PromptSectionBase implements PromptSection {
public readonly required: boolean;
public readonly tokens: number;
public readonly separator: string;
public readonly textPrefix: string;
/**
* Creates a new 'PromptSectionBase' instance.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix: string = '') {
this.required = required;
this.tokens = tokens;
this.separator = separator;
this.textPrefix = textPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Render as messages
const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);
// Convert to text
let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);
// Calculate length
const prefixLength = tokenizer.encode(this.textPrefix).length;
const separatorLength = tokenizer.encode(this.separator).length;
let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);
// Truncate if fixed length
text = this.textPrefix + text;
if (this.tokens > 1.0 && length > this.tokens) {
const encoded = tokenizer.encode(text);
text = tokenizer.decode(encoded.slice(0, this.tokens));
length = this.tokens;
}
return { output: text, length: length, tooLong: length > maxTokens };
}
public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;
protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {
// Truncate if fixed length
if (this.tokens > 1.0) {
while (length > this.tokens) {
const msg = output.pop();
const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));
length -= encoded.length;
if (length < this.tokens) {
const delta = this.tokens - length;
const truncated = tokenizer.decode(encoded.slice(0, delta));
output.push({ role: msg!.role, content: truncated });
length += delta;
}
}
}
return { output: output, length: length, tooLong: length > maxTokens };
}
public static getMessageText(message: Message): string {
|
let text = message.content ?? '';
|
if (message.function_call) {
text = JSON.stringify(message.function_call);
} else if (message.name) {
text = `${message.name} returned ${text}`;
}
return text;
}
}
|
src/PromptSectionBase.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " // Add message\n tokens += length;\n messages.unshift(message);\n }\n return { output: messages, length: tokens, tooLong: tokens > maxTokens };\n }\n}",
"score": 0.8393882513046265
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " const messages: Message[] = [];\n for (let i = history.length - 1; i >= 0; i--) {\n // Clone message\n const msg = history[i];\n const message: Message = Object.assign({}, msg);\n if (msg.content !== null) {\n message.content = Utilities.toString(tokenizer, msg.content);\n }\n // Get message length\n const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;",
"score": 0.827889621257782
},
{
"filename": "src/TemplateSection.ts",
"retrieved_chunk": " const text = renderedParts.join('');\n const length = tokenizer.encode(text).length;\n // Return output\n return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);\n }\n private parseTemplate(): void {\n // Parse template\n let part = '';\n let state = ParseState.inText;\n let stringDelim = '';",
"score": 0.8268886804580688
},
{
"filename": "src/types.ts",
"retrieved_chunk": " tooLong: boolean;\n}\nexport interface Message<TContent = string> {\n /**\n * The messages role. Typically 'system', 'user', 'assistant', 'function'.\n */\n role: string;\n /**\n * Text of the message.\n */",
"score": 0.8168059587478638
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " return this.returnMessages([{ role: 'function', name: this.name, content: this._text }], this._length, tokenizer, maxTokens);\n }\n}",
"score": 0.8132710456848145
}
] |
typescript
|
let text = message.content ?? '';
|
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const vaue = memory.get(name);
return Promise.resolve(
|
Utilities.toString(tokenizer, vaue));
|
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const value = await functions.invoke(name, memory, functions, tokenizer, args);
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
}
|
src/TemplateSection.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " this.variable = variable;\n this.userPrefix = userPrefix;\n this.assistantPrefix = assistantPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Get messages from memory\n const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];\n // Populate history and stay under the token budget\n let tokens = 0;\n const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;",
"score": 0.8662111163139343
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " this.textPrefix = textPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Render as messages\n const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);\n // Convert to text\n let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);\n // Calculate length\n const prefixLength = tokenizer.encode(this.textPrefix).length;\n const separatorLength = tokenizer.encode(this.separator).length;",
"score": 0.8622737526893616
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.8486539125442505
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 0.8468061685562134
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface PromptFunctions {\n has(name: string): boolean;\n get(name: string): PromptFunction;\n invoke(name: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any>;\n}\nexport interface Tokenizer {\n decode(tokens: number[]): string;\n encode(text: string): number[];\n}",
"score": 0.8430838584899902
}
] |
typescript
|
Utilities.toString(tokenizer, vaue));
|
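The TemplateSection record above documents the template syntax: {{$memoryKey}} reads a memory variable, {{functionName}} invokes a registered function, and arguments may be quoted. A minimal sketch of both forms, where the memory key `name` and the function `upper` are hypothetical:
// Sketch of TemplateSection usage; the `name` variable and `upper` function are hypothetical.
import { TemplateSection } from "./TemplateSection";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
async function templateExample() {
    const memory = new VolatileMemory({ name: "World" });
    const functions = new FunctionRegistry({
        upper: async (memory, functions, tokenizer, args) => args[0].toUpperCase(),
    });
    const tokenizer = new GPT3Tokenizer();
    // {{$name}} resolves from memory; {{upper 'big'}} calls the registered function with one quoted argument
    const section = new TemplateSection("Hello {{$name}}, {{upper 'big'}} day!", "user");
    const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
    console.log(rendered.output); // [{ role: "user", content: "Hello World, BIG day!" }]
}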
import { Message, PromptFunctions, PromptMemory, PromptSection, RenderedPromptSection, Tokenizer } from "./types";
/**
* Abstract Base class for most prompt sections.
*/
export abstract class PromptSectionBase implements PromptSection {
public readonly required: boolean;
public readonly tokens: number;
public readonly separator: string;
public readonly textPrefix: string;
/**
* Creates a new 'PromptSectionBase' instance.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix: string = '') {
this.required = required;
this.tokens = tokens;
this.separator = separator;
this.textPrefix = textPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Render as messages
const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);
// Convert to text
let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);
// Calculate length
const prefixLength = tokenizer.encode(this.textPrefix).length;
const separatorLength = tokenizer.encode(this.separator).length;
let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);
// Truncate if fixed length
text = this.textPrefix + text;
if (this.tokens > 1.0 && length > this.tokens) {
const encoded = tokenizer.encode(text);
text = tokenizer.decode(encoded.slice(0, this.tokens));
length = this.tokens;
}
return { output: text, length: length, tooLong: length > maxTokens };
}
public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;
protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {
// Truncate if fixed length
if (this.tokens > 1.0) {
while (length > this.tokens) {
const msg = output.pop();
const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));
length -= encoded.length;
if (length < this.tokens) {
const delta = this.tokens - length;
const truncated = tokenizer.decode(encoded.slice(0, delta));
output.push({ role: msg
|
!.role, content: truncated });
|
length += delta;
}
}
}
return { output: output, length: length, tooLong: length > maxTokens };
}
public static getMessageText(message: Message): string {
let text = message.content ?? '';
if (message.function_call) {
text = JSON.stringify(message.function_call);
} else if (message.name) {
text = `${message.name} returned ${text}`;
}
return text;
}
}
|
src/PromptSectionBase.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " const messages: Message[] = [];\n for (let i = history.length - 1; i >= 0; i--) {\n // Clone message\n const msg = history[i];\n const message: Message = Object.assign({}, msg);\n if (msg.content !== null) {\n message.content = Utilities.toString(tokenizer, msg.content);\n }\n // Get message length\n const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;",
"score": 0.885860800743103
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " lines.unshift(line);\n }\n return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Get messages from memory\n const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];\n // Populate messages and stay under the token budget\n let tokens = 0;\n const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;",
"score": 0.8777462244033813
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.8748929500579834
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return { output: text, length: tokenizer.encode(text).length, tooLong: remaining < 0 };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Start a new layout\n // - Adds all sections from the current LayoutEngine hierarchy to a flat array\n const layout: PromptSectionLayout<Message[]>[] = [];\n this.addSectionsToLayout(this.sections, layout);\n // Layout sections\n const remaining = await this.layoutSections(\n layout,",
"score": 0.8724621534347534
},
{
"filename": "src/Prompt.spec.ts",
"retrieved_chunk": " const prompt = new Prompt([\n new Prompt([\n new TextSection(\"Hello\", \"user\")\n ]),\n new TextSection(\"World\", \"user\")\n ]);\n const rendered = await prompt.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello\" }, { role: \"user\", content: \"World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);",
"score": 0.8615802526473999
}
] |
typescript
|
!.role, content: truncated });
|
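Aside: the returnMessages helper in the PromptSectionBase record above drops and then re-truncates the newest message when a fixed token budget is exceeded. A minimal sketch of that idea, assuming a naive whitespace tokenizer and a toy message type (neither is the library's real implementation):

// Naive whitespace "tokenizer" standing in for the real Tokenizer interface (illustrative only).
interface ToyMessage { role: string; content: string; }
const wsEncode = (text: string): string[] => text.split(/\s+/).filter(Boolean);
const wsDecode = (tokens: string[]): string => tokens.join(' ');

// Mirrors the fixed-length branch of returnMessages: pop the newest message, then push back
// a truncated copy so the total length lands exactly on the budget.
function truncateToBudget(output: ToyMessage[], length: number, budget: number): { output: ToyMessage[]; length: number } {
  while (length > budget) {
    const msg = output.pop()!;
    const encoded = wsEncode(msg.content);
    length -= encoded.length;
    if (length < budget) {
      const delta = budget - length;
      output.push({ role: msg.role, content: wsDecode(encoded.slice(0, delta)) });
      length += delta;
    }
  }
  return { output, length };
}

// A 5-token budget keeps "one two three" plus the first two tokens of the next message.
console.log(truncateToBudget(
  [{ role: 'user', content: 'one two three' }, { role: 'assistant', content: 'four five six seven' }],
  7,
  5,
));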
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
     * @param variable Name of the memory variable used to store the history's `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) };
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
|
if (lines.length === 0 && this.required) {
|
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
}
|
src/ConversationHistory.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);\n // Truncate if fixed length\n text = this.textPrefix + text;\n if (this.tokens > 1.0 && length > this.tokens) {\n const encoded = tokenizer.encode(text);\n text = tokenizer.decode(encoded.slice(0, this.tokens));\n length = this.tokens;\n }\n return { output: text, length: length, tooLong: length > maxTokens };\n }",
"score": 0.8409548997879028
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " const rendered = await section.renderAsText(memory, functions, tokenizer, 2);\n assert.equal(rendered.output, \"\");\n assert.equal(rendered.length, 0);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should always render the last message when section is required\", async () => {\n const section = new ConversationHistory('longHistory', 100, true);\n const rendered = await section.renderAsText(memory, functions, tokenizer, 2);\n assert.equal(rendered.output, \"assistant: Sure, where would you like to go?\");\n assert.equal(rendered.length, 12);",
"score": 0.8262797594070435
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " const section = new ConversationHistory('longHistory', 100);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);\n assert.deepEqual(rendered.output, []);\n assert.equal(rendered.length, 0);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should always render the last message when section is required\", async () => {\n const section = new ConversationHistory('longHistory', 100, true);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);\n assert.deepEqual(rendered.output, [",
"score": 0.821076512336731
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " this.textPrefix = textPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Render as messages\n const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);\n // Convert to text\n let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);\n // Calculate length\n const prefixLength = tokenizer.encode(this.textPrefix).length;\n const separatorLength = tokenizer.encode(this.separator).length;",
"score": 0.8187659978866577
},
{
"filename": "src/GroupSection.ts",
"retrieved_chunk": " super(tokens, required, separator, textPrefix);\n this._layoutEngine = new LayoutEngine(sections, tokens, required, separator);\n this.sections = sections;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Render sections to text\n const { output, length, tooLong } = await this._layoutEngine.renderAsText(memory, functions, tokenizer, maxTokens);\n // Return output as a single message\n return this.returnMessages([{ role: this.role, content: output }], length, tokenizer, maxTokens);",
"score": 0.8155971765518188
}
] |
typescript
|
if (lines.length === 0 && this.required) {
|
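Aside: renderAsText in the ConversationHistory record above walks the history newest-first and stops once the token budget is exhausted, so the most recent turns always survive. A standalone sketch of that loop, assuming a whitespace token count and the default 'user: '/'assistant: ' prefixes:

// Illustrative restatement of the budgeting loop; the real code uses the Tokenizer interface.
const countTokens = (text: string): number => text.split(/\s+/).filter(Boolean).length;

function renderHistoryAsText(history: { role: string; content: string }[], budget: number): string {
  const lines: string[] = [];
  let tokens = 0;
  // Walk newest-to-oldest so the most recent turns survive when the budget runs out.
  for (let i = history.length - 1; i >= 0; i--) {
    const prefix = history[i].role === 'user' ? 'user: ' : 'assistant: ';
    const line = prefix + history[i].content;
    const length = countTokens(line) + (lines.length > 0 ? 1 : 0); // +1 for the separator
    if (tokens + length > budget) break;
    tokens += length;
    lines.unshift(line); // prepend so the output stays in chronological order
  }
  return lines.join('\n');
}

// With a tight budget only the latest exchange is kept; the oldest turn is dropped.
console.log(renderHistoryAsText(
  [
    { role: 'user', content: 'Hi there' },
    { role: 'assistant', content: 'Hello' },
    { role: 'user', content: 'Book me a flight' },
  ],
  8,
));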
import { Message, PromptFunctions, PromptMemory, PromptSection, RenderedPromptSection, Tokenizer } from "./types";
/**
* Abstract Base class for most prompt sections.
*/
export abstract class PromptSectionBase implements PromptSection {
public readonly required: boolean;
public readonly tokens: number;
public readonly separator: string;
public readonly textPrefix: string;
/**
* Creates a new 'PromptSectionBase' instance.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix: string = '') {
this.required = required;
this.tokens = tokens;
this.separator = separator;
this.textPrefix = textPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Render as messages
const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);
// Convert to text
let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);
// Calculate length
const prefixLength = tokenizer.encode(this.textPrefix).length;
const separatorLength = tokenizer.encode(this.separator).length;
let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);
// Truncate if fixed length
text = this.textPrefix + text;
if (this.tokens > 1.0 && length > this.tokens) {
const encoded = tokenizer.encode(text);
text = tokenizer.decode(encoded.slice(0, this.tokens));
length = this.tokens;
}
return { output: text, length: length, tooLong: length > maxTokens };
}
public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;
protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {
// Truncate if fixed length
if (this.tokens > 1.0) {
while (length > this.tokens) {
const msg = output.pop();
const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));
length -= encoded.length;
if (length < this.tokens) {
const delta = this.tokens - length;
const truncated = tokenizer.decode(encoded.slice(0, delta));
output.push({ role: msg!.role, content: truncated });
length += delta;
}
}
}
return { output: output, length: length, tooLong: length > maxTokens };
}
public static getMessageText(message: Message): string {
let text = message.content ?? '';
if (message.function_call) {
text = JSON.stringify(message.function_call);
} else if
|
(message.name) {
|
text = `${message.name} returned ${text}`;
}
return text;
}
}
|
src/PromptSectionBase.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " // Add message\n tokens += length;\n messages.unshift(message);\n }\n return { output: messages, length: tokens, tooLong: tokens > maxTokens };\n }\n}",
"score": 0.8754774332046509
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " return this.returnMessages([{ role: 'function', name: this.name, content: this._text }], this._length, tokenizer, maxTokens);\n }\n}",
"score": 0.856917679309845
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 0.8352315425872803
},
{
"filename": "src/types.ts",
"retrieved_chunk": " tooLong: boolean;\n}\nexport interface Message<TContent = string> {\n /**\n * The messages role. Typically 'system', 'user', 'assistant', 'function'.\n */\n role: string;\n /**\n * Text of the message.\n */",
"score": 0.8304286599159241
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return { output: text, length: tokenizer.encode(text).length, tooLong: remaining < 0 };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Start a new layout\n // - Adds all sections from the current LayoutEngine hierarchy to a flat array\n const layout: PromptSectionLayout<Message[]>[] = [];\n this.addSectionsToLayout(this.sections, layout);\n // Layout sections\n const remaining = await this.layoutSections(\n layout,",
"score": 0.8233548402786255
}
] |
typescript
|
(message.name) {
|
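Aside: getMessageText in the record above picks the text to measure with a fixed precedence: a function_call is JSON-stringified, a named function response becomes "name returned ...", and plain content is used as-is. Restated as a standalone sketch with a toy message type:

// Standalone restatement of getMessageText's precedence rules (toy Message type for illustration).
interface ToyMsg { role: string; content?: string | null; name?: string; function_call?: object; }

function messageText(message: ToyMsg): string {
  let text = message.content ?? '';
  if (message.function_call) {
    text = JSON.stringify(message.function_call);     // function calls win
  } else if (message.name) {
    text = `${message.name} returned ${text}`;        // function responses get a prefix
  }
  return text;
}

console.log(messageText({ role: 'assistant', content: null, function_call: { name: 'search', arguments: '{}' } }));
console.log(messageText({ role: 'function', name: 'search', content: '42' })); // "search returned 42"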
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
     * @param variable Name of the memory variable used to store the history's `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
const message: Message = { role
|
: msg.role, content: Utilities.toString(tokenizer, msg.content) };
|
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
if (lines.length === 0 && this.required) {
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
}
|
src/ConversationHistory.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 0.8466004729270935
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should truncate its output to match available budget\", async () => {\n const section = new ConversationHistory('history', 1);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered.output, [\n { role: \"assistant\", content: \"Hi\" },\n ]);\n assert.equal(rendered.length, 1);",
"score": 0.8443402051925659
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.8381252288818359
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 0.8360037207603455
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " const section = new ConversationHistory('longHistory', 100);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);\n assert.deepEqual(rendered.output, []);\n assert.equal(rendered.length, 0);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should always render the last message when section is required\", async () => {\n const section = new ConversationHistory('longHistory', 100, true);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);\n assert.deepEqual(rendered.output, [",
"score": 0.834741473197937
}
] |
typescript
|
: msg.role, content: Utilities.toString(tokenizer, msg.content) };
|
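Aside: the `budget` line in the ConversationHistory record above encodes the sizing convention used throughout: a tokens value greater than 1.0 is a fixed cap (still clamped by the caller's maxTokens), while the proportional default of 1.0 simply defers to maxTokens. A one-function restatement (illustrative, not the library API):

function historyBudget(tokens: number, maxTokens: number): number {
  // Fixed sizing when tokens > 1.0, otherwise fall back to whatever the caller allows.
  return tokens > 1.0 ? Math.min(tokens, maxTokens) : maxTokens;
}

console.log(historyBudget(100, 2));  // fixed sizing capped by the caller -> 2
console.log(historyBudget(1.0, 50)); // proportional default -> 50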
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
return this
|
.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
|
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const vaue = memory.get(name);
return Promise.resolve(Utilities.toString(tokenizer, vaue));
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const value = await functions.invoke(name, memory, functions, tokenizer, args);
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
}
|
src/TemplateSection.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.9569934606552124
},
{
"filename": "src/FunctionCallMessage.ts",
"retrieved_chunk": " this.function_call = function_call;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._length = tokenizer.encode(JSON.stringify(this.function_call)).length;\n }\n // Return output\n return this.returnMessages([{ role: 'assistant', content: null, function_call: this.function_call }], this._length, tokenizer, maxTokens);\n }",
"score": 0.9417275786399841
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 0.9354421496391296
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " this.textPrefix = textPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Render as messages\n const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);\n // Convert to text\n let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);\n // Calculate length\n const prefixLength = tokenizer.encode(this.textPrefix).length;\n const separatorLength = tokenizer.encode(this.separator).length;",
"score": 0.9269675016403198
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 0.9228096008300781
}
] |
typescript
|
.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
|
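Aside: the TemplateSection record above documents the `{{$memoryKey}}` and `{{functionName arg1 arg2}}` syntax. The toy renderer below shows the substitution semantics with a regex instead of the real character-by-character parser; the memory map and the `upper` function are made up for the example, and quoted arguments are ignored:

// Toy illustration of the documented template syntax: `{{$key}}` reads memory, `{{fn args}}` invokes a function.
const memory = new Map<string, string>([['name', 'Ada']]);
const fns = new Map<string, (args: string[]) => string>([
  ['upper', (args) => args.join(' ').toUpperCase()],
]);

function renderToyTemplate(template: string): string {
  return template.replace(/\{\{\s*([^}]+?)\s*\}\}/g, (_m, body: string) => {
    if (body.startsWith('$')) return memory.get(body.slice(1)) ?? '';
    const [name, ...args] = body.split(/\s+/);
    return fns.get(name)?.(args) ?? '';
  });
}

console.log(renderToyTemplate('Hello {{$name}}, {{upper good morning}}!'));
// -> "Hello Ada, GOOD MORNING!"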
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const
|
vaue = memory.get(name);
|
return Promise.resolve(Utilities.toString(tokenizer, vaue));
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const value = await functions.invoke(name, memory, functions, tokenizer, args);
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
}
|
src/TemplateSection.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " this.variable = variable;\n this.userPrefix = userPrefix;\n this.assistantPrefix = assistantPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Get messages from memory\n const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];\n // Populate history and stay under the token budget\n let tokens = 0;\n const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;",
"score": 0.8629607558250427
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " this.textPrefix = textPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Render as messages\n const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);\n // Convert to text\n let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);\n // Calculate length\n const prefixLength = tokenizer.encode(this.textPrefix).length;\n const separatorLength = tokenizer.encode(this.separator).length;",
"score": 0.8566815853118896
},
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " }\n this._functions.set(name, value);\n }\n public invoke(key: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any> {\n const fn = this.get(key);\n return fn(memory, functions, tokenizer, args);\n }\n}",
"score": 0.84578537940979
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface PromptFunctions {\n has(name: string): boolean;\n get(name: string): PromptFunction;\n invoke(name: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any>;\n}\nexport interface Tokenizer {\n decode(tokens: number[]): string;\n encode(text: string): number[];\n}",
"score": 0.8450009226799011
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 0.8439674377441406
}
] |
typescript
|
vaue = memory.get(name);
|
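Aside: createFunctionRenderer in the TemplateSection record above splits a function call into a name and arguments, treating ', ", and ` as quote delimiters so multi-word arguments stay intact. A compact restatement of that splitting (illustrative only; the original uses the ParseState machine shown in the record):

function splitArgs(param: string): string[] {
  const parts: string[] = [];
  let part = '';
  let quote = '';
  for (const char of param) {
    if (quote) {
      // Inside a quoted argument: collect until the matching delimiter.
      if (char === quote) { parts.push(part); part = ''; quote = ''; }
      else part += char;
    } else if (["'", '"', '`'].includes(char)) {
      if (part) { parts.push(part); part = ''; }
      quote = char;
    } else if (char === ' ') {
      if (part) { parts.push(part); part = ''; }
    } else {
      part += char;
    }
  }
  if (part) parts.push(part);
  return parts;
}

console.log(splitArgs(`describe 'New York City' weather`));
// -> [ 'describe', 'New York City', 'weather' ]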
import { log } from "./log";
import { AssembledProgram, Option } from "./types";
import { maskOfSize } from "./util";
/**
* Builds the output buffer from the matched instructions
* @param program The configured program we have built
* @param word16Align If true, align the 12 bit opcodes to 16 bit words. The lowest nibble will be 0
* @returns The output buffer that should be written to the assembled binary
*/
export const outputInstructions = (
program: AssembledProgram,
word16Align: boolean
): Option<Buffer> => {
// This buffer stores each nibble of the program separately, and we will combine this later into the output buffer
const threeNibbleBuffer: number[] = new Array(8192 * 3);
// Fill array with 0xF
for (let i = 0; i < threeNibbleBuffer.length; i++) {
threeNibbleBuffer[i] = 0xf;
}
for (const instruction of program.matchedInstructions) {
let opcode = 0;
switch (instruction.type) {
case "literal": {
opcode = buildOpcode(instruction.opcodeString, 0, 0);
break;
}
case "immediate": {
opcode = buildOpcode(
instruction.opcodeString,
instruction.bitCount,
instruction.immediate
);
break;
}
case "label": {
const label = program.matchedLabels[instruction.label];
if (!label) {
log(`Unknown label ${instruction.label}`, instruction.lineNumber);
return { type: "none" };
}
opcode = buildOpcode(
instruction.opcodeString,
instruction.bitCount,
label.address
);
break;
}
case "constant": {
if (instruction.subtype === "literal") {
opcode = instruction.value;
} else {
// Label
const label = program.matchedLabels[instruction.label];
if (!label) {
log(`Unknown label ${instruction.label}`, instruction.lineNumber);
return { type: "none" };
}
console.log(`${label.address.toString(16)}`);
opcode = label.address;
}
break;
}
}
const low = opcode & 0xf;
const mid = (opcode & 0xf0) >> 4;
const high = (opcode & 0xf00) >> 8;
const baseAddress = instruction.address * 3;
// We use reverse order because that's how the nibbles are in the ROM
threeNibbleBuffer[baseAddress] = high;
threeNibbleBuffer[baseAddress + 1] = mid;
threeNibbleBuffer[baseAddress + 2] = low;
}
return {
type: "some",
value: copyToOutputBuffer(threeNibbleBuffer, word16Align),
};
};
const copyToOutputBuffer = (
threeNibbleBuffer: number[],
word16Align: boolean
): Buffer => {
const bufferSize = word16Align ? 8192 * 2 : (8192 * 3) / 2;
const buffer = Buffer.alloc(bufferSize);
let byteBuffer = 0;
let bufferAddress = 0;
let lowNibble = false;
let evenByte = true;
for (let i = 0; i < threeNibbleBuffer.length; i++) {
const nibble = threeNibbleBuffer[i]!;
const writeSpacerValue = word16Align && !lowNibble && evenByte;
if (lowNibble || writeSpacerValue) {
// "Second", lower value of byte, or we're writing the spacer now
byteBuffer |= nibble;
buffer[bufferAddress] = byteBuffer;
bufferAddress += 1;
byteBuffer = 0;
evenByte = !evenByte;
} else {
// "First", upper value of byte
byteBuffer |= nibble << 4;
}
if (!writeSpacerValue) {
// We've moved to the next byte if we wrote a spacer, so stay at !lowNibble
lowNibble = !lowNibble;
}
}
return buffer;
};
/**
 * Consumes the opcode template from the BASS arch file and produces the actual output word
* @param template The opcode template from the BASS arch file
* @param argSize The number of bits in an argument to the opcode, if any
* @param argument The actual data to pass as an argument to the opcode, if any
* @returns The output opcode as a 12 bit word
*/
export const buildOpcode = (
template: string,
argSize: number,
argument: number
) => {
let index = 0;
let outputWord = 0;
while (index < template.length) {
const char = template[index];
if (char === "%") {
// Consume chars until whitespace
let data = 0;
let count = 0;
for (let i = 1; i < Math.min(13, template.length - index); i++) {
const nextChar = template[index + i]!;
if (nextChar !== "1" && nextChar !== "0") {
// Stop consuming
break;
}
data <<= 1;
data |= nextChar === "1" ? 1 : 0;
count += 1;
}
// Consume the next four chars as bits
outputWord <<= count;
outputWord |= data;
index += count + 1;
} else if (char === "=") {
if (template[index + 1] !== "a") {
console.log(
`ERROR: Unexpected char after = in instruction definition "${template}"`
);
return 0;
}
outputWord <<= argSize;
outputWord |=
|
maskOfSize(argSize) & argument;
|
index += 2;
} else {
index += 1;
}
}
return outputWord;
};
|
src/lib/opcodeOutput.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 0.8369553089141846
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " const matchString = numberMatch[0];\n // This is guaranteed to exist due to the regex\n const bitCount = parseNumber(numberMatch[1]!);\n const index = numberMatch.index;\n const instructionLine =\n originalInstruction.substring(0, index) +\n \"(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))\" +\n originalInstruction.substring(index + matchString.length);\n const sortableOpcode = buildSortableOpcode(opcodeString, bitCount);\n config.instructions.push({",
"score": 0.7963113784790039
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n } else {\n // This is a literal\n const sortableOpcode = buildSortableOpcode(opcodeString, 0);\n config.instructions.push({\n type: \"literal\",\n regex: cleanAndFinishInstructionRegex(originalInstruction),\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.7959404587745667
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " return;\n }\n program.matchedInstructions.push({\n type: \"immediate\",\n line,\n immediate: parseNumber(matches[1]),\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,",
"score": 0.7956961989402771
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " program.matchedInstructions.push({\n type: \"label\",\n line,\n label: matches[2],\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,\n });\n } else {",
"score": 0.790250301361084
}
] |
typescript
|
maskOfSize(argSize) & argument;
|
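Aside: buildOpcode in the opcodeOutput.ts record above consumes a BASS-style template in which `%` introduces literal bits and `=a` splices in the argument masked to argSize. A condensed sketch of that behavior, assuming maskOfSize from ./util is the usual `(1 << size) - 1` helper and ignoring the 12-bit cap the original enforces:

const maskOfSize = (size: number): number => (1 << size) - 1; // assumed helper, for illustration

function buildOpcodeSketch(template: string, argSize: number, argument: number): number {
  let out = 0;
  let i = 0;
  while (i < template.length) {
    const char = template[i];
    if (char === '%') {
      // Consume the run of literal 0/1 bits that follows the '%'.
      let data = 0, count = 0;
      while (i + 1 + count < template.length && '01'.includes(template[i + 1 + count])) {
        data = (data << 1) | (template[i + 1 + count] === '1' ? 1 : 0);
        count++;
      }
      out = (out << count) | data;
      i += count + 1;
    } else if (char === '=' && template[i + 1] === 'a') {
      // Append the argument, masked down to argSize bits.
      out = (out << argSize) | (maskOfSize(argSize) & argument);
      i += 2;
    } else {
      i += 1; // skip whitespace and other separators
    }
  }
  return out;
}

// "%1000 =a" with an 8-bit argument of 0x2a assembles to 0x82a.
console.log(buildOpcodeSketch('%1000 =a', 8, 0x2a).toString(16)); // "82a"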
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
     * @param variable Name of the memory variable used to store the history's `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
const message: Message = { role:
|
msg.role, content: Utilities.toString(tokenizer, msg.content) };
|
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
if (lines.length === 0 && this.required) {
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
}
|
src/ConversationHistory.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 0.846429169178009
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should truncate its output to match available budget\", async () => {\n const section = new ConversationHistory('history', 1);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered.output, [\n { role: \"assistant\", content: \"Hi\" },\n ]);\n assert.equal(rendered.length, 1);",
"score": 0.8421861529350281
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 0.8370794653892517
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.8361703157424927
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " const section = new ConversationHistory('longHistory', 100);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);\n assert.deepEqual(rendered.output, []);\n assert.equal(rendered.length, 0);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should always render the last message when section is required\", async () => {\n const section = new ConversationHistory('longHistory', 100, true);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);\n assert.deepEqual(rendered.output, [",
"score": 0.8339793086051941
}
] |
typescript
|
msg.role, content: Utilities.toString(tokenizer, msg.content) };
|
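Aside: every renderAsText/renderAsMessages call in these records resolves to a RenderedPromptSection carrying the output, its token length, and a tooLong flag. Restated locally for illustration, with values echoing the ConversationHistory.spec.ts chunk above:

// Local stand-in for the RenderedPromptSection shape from src/types.ts.
interface Rendered<T> { output: T; length: number; tooLong: boolean; }

const rendered: Rendered<{ role: string; content: string }[]> = {
  output: [{ role: 'assistant', content: 'Hi' }],
  length: 1,
  tooLong: false,
};
console.log(rendered.tooLong ? 'over budget' : `fits in ${rendered.length} token(s)`);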
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
|
return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
|
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const vaue = memory.get(name);
return Promise.resolve(Utilities.toString(tokenizer, vaue));
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const value = await functions.invoke(name, memory, functions, tokenizer, args);
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
}
|
src/TemplateSection.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 0.9571319818496704
},
{
"filename": "src/FunctionCallMessage.ts",
"retrieved_chunk": " this.function_call = function_call;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._length = tokenizer.encode(JSON.stringify(this.function_call)).length;\n }\n // Return output\n return this.returnMessages([{ role: 'assistant', content: null, function_call: this.function_call }], this._length, tokenizer, maxTokens);\n }",
"score": 0.9431426525115967
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " this.textPrefix = textPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Render as messages\n const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);\n // Convert to text\n let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);\n // Calculate length\n const prefixLength = tokenizer.encode(this.textPrefix).length;\n const separatorLength = tokenizer.encode(this.separator).length;",
"score": 0.9338347911834717
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 0.9295350313186646
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 0.9287620186805725
}
] |
typescript
|
return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
|
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
            const value = memory.get(name);
            return Promise.resolve(Utilities.toString(tokenizer, value));
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
|
const value = await functions.invoke(name, memory, functions, tokenizer, args);
|
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
}
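// A minimal usage sketch of the template syntax documented above: `{{$name}}` reads a
// memory value and `{{dayOfWeek 'short'}}` invokes a registered function with a quoted
// argument. The "dayOfWeek" function name and the 1024 token budget are illustrative
// assumptions, not part of this library's API.
export async function templateSectionExample(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer): Promise<Message[]> {
    const section = new TemplateSection("Hello {{$name}}! Today is {{dayOfWeek 'short'}}.", "user");
    const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1024);
    // With memory holding { name: "World" } and a "dayOfWeek" function returning "Mon",
    // rendered.output would be [{ role: "user", content: "Hello World! Today is Mon." }].
    return rendered.output;
}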
|
src/TemplateSection.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " }\n this._functions.set(name, value);\n }\n public invoke(key: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any> {\n const fn = this.get(key);\n return fn(memory, functions, tokenizer, args);\n }\n}",
"score": 0.8436298370361328
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 0.8414284586906433
},
{
"filename": "src/FunctionCallMessage.ts",
"retrieved_chunk": " this.function_call = function_call;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._length = tokenizer.encode(JSON.stringify(this.function_call)).length;\n }\n // Return output\n return this.returnMessages([{ role: 'assistant', content: null, function_call: this.function_call }], this._length, tokenizer, maxTokens);\n }",
"score": 0.8341759443283081
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " this.textPrefix = textPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Render as messages\n const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);\n // Convert to text\n let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);\n // Calculate length\n const prefixLength = tokenizer.encode(this.textPrefix).length;\n const separatorLength = tokenizer.encode(this.separator).length;",
"score": 0.8289875388145447
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " this.variable = variable;\n this.userPrefix = userPrefix;\n this.assistantPrefix = assistantPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Get messages from memory\n const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];\n // Populate history and stay under the token budget\n let tokens = 0;\n const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;",
"score": 0.8250035047531128
}
] |
typescript
|
const value = await functions.invoke(name, memory, functions, tokenizer, args);
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
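// Illustrative source lines (made-up values) that the two command regexes above accept:
// `origin` moves program.currentAddress, while `constant` emits either a 12-bit literal
// or a label reference that is resolved later during output.
const exampleCommandLines = [
  "origin 0x100",          // sets program.currentAddress to 0x100
  "constant 0xff",         // literal constant, must fit in 12 bits
  "constant start_vector", // constant resolved from a label at output time
];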
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
|
if (hasInstruction && program.unmatchedLabels.length > 0) {
|
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
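// Illustrative assembly fragment showing how parseAsmLine pairs labels with instructions:
// a label alone on its line is queued in program.unmatchedLabels and attached to the next
// matched instruction, while a label sharing a line with an instruction pairs immediately.
// The exact mnemonic and label syntax depend on the BASS arch file and labelRegex, so
// these lines are assumptions for illustration only.
const exampleLabelPairing = [
  "start:",     // no instruction yet: queued in unmatchedLabels
  "  jp start", // first instruction after the label: "start" now maps to this address
];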
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
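// Example invocations (file names are illustrative; see the usage message above).
// Omitting the last argument keeps 16-bit aligned words; passing "true" selects the
// packed 12-bit output path (word16Align === false).
//   node assembler.js program.asm program.bin
//   node assembler.js program.asm program.bin true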
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8830713629722595
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8552533388137817
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 0.849614679813385
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " if (lineLabel) {\n output += `\\n${lineLabel.name}:\\n`;\n }\n output += ` ${buildDisassembledInstructionString(\n instruction,\n immediateLabel\n )}\\n`;\n address += 1;\n }\n return output;",
"score": 0.8364822864532471
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " for (const instruction of namedLabel.instructions) {\n labelUsageMap[instruction.address] = namedLabel.name;\n }\n }\n }\n let output = \"\";\n let address = 0;\n for (const instruction of disassembledInstructions) {\n const immediateLabel = labelUsageMap[instruction.address];\n const lineLabel = namedLabels[instruction.address];",
"score": 0.8346360921859741
}
] |
typescript
|
if (hasInstruction && program.unmatchedLabels.length > 0) {
|
import { Instruction } from "./bass";
export interface MatchedInstructionBase {
line: string;
lineNumber: number;
address: number;
}
export type ConstantLiteralMatchedInstruction = MatchedInstructionBase & {
type: "constant";
subtype: "literal";
value: number;
};
export type ConstantLabelMatchedInstruction = MatchedInstructionBase & {
type: "constant";
subtype: "label";
label: string;
};
export type ImmediateMatchedInstruction = MatchedInstructionBase & {
type: "immediate";
immediate: number;
bitCount: number;
opcodeString: string;
};
export type LabelMatchedInstruction = MatchedInstructionBase & {
type: "label";
label: string;
bitCount: number;
opcodeString: string;
};
export type LiteralMatchedInstruction = MatchedInstructionBase & {
type: "literal";
opcodeString: string;
};
export interface AssembledProgram {
currentAddress: number;
matchedInstructions: Array<
| ConstantLiteralMatchedInstruction
| ConstantLabelMatchedInstruction
| ImmediateMatchedInstruction
| LabelMatchedInstruction
| LiteralMatchedInstruction
>;
matchedLabels: {
[name: string]: {
lineNumber: number;
instructionIndex: number;
address: number;
};
};
unmatchedLabels: Array<{
label: string;
lineNumber: number;
}>;
}
/// Disassembly ///
export interface DisassembledInstruction {
|
instruction: Instruction;
|
actualWord: number;
address: number;
}
export interface Some<T> {
type: "some";
value: T;
}
export interface None {
type: "none";
}
export type Option<T> = Some<T> | None;
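// Illustrative sketch of the Option type above, mirroring how callers narrow on `type`
// (for example, the assembler checks `outputBuffer.type === "some"` before writing).
const exampleOption: Option<number> = { type: "some", value: 0xfff };
if (exampleOption.type === "some") {
  // Narrowed to Some<number>; `value` is accessible here.
  console.log(exampleOption.value.toString(16));
}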
|
src/lib/types.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " let labelCount = 0;\n const namedLabels: Array<\n | {\n name: string;\n instructions: DisassembledInstruction[];\n }\n | undefined\n > = unsetLabels.map((instructions) => {\n if (!!instructions) {\n return {",
"score": 0.8545241951942444
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " instructions: Array<Instruction>;\n}\nexport type Instruction = ImmediateInstruction | LiteralInstruction;\nexport interface InstructionBase {\n regex: RegExp;\n opcodeString: string;\n sortableOpcode: number;\n originalInstruction: string;\n}\nexport type ImmediateInstruction = InstructionBase & {",
"score": 0.8537420034408569
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 0.8400335311889648
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": "import { ImmediateInstruction, Instruction } from \"./bass\";\nimport { buildDisassembledInstructionString } from \"./display\";\nimport { DisassembledInstruction } from \"./types\";\nimport { maskOfSize } from \"./util\";\nexport const parseBinaryBuffer = (\n buffer: Buffer,\n instructions: Instruction[]\n): string => {\n const disassembledInstructions: DisassembledInstruction[] = [];\n const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(",
"score": 0.8267773389816284
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " for (const instruction of namedLabel.instructions) {\n labelUsageMap[instruction.address] = namedLabel.name;\n }\n }\n }\n let output = \"\";\n let address = 0;\n for (const instruction of disassembledInstructions) {\n const immediateLabel = labelUsageMap[instruction.address];\n const lineLabel = namedLabels[instruction.address];",
"score": 0.8253210783004761
}
] |
typescript
|
instruction: Instruction;
|
import { strict as assert } from "assert";
import { VolatileMemory } from "./VolatileMemory";
describe("VolatileMemory", () => {
describe("constructor", () => {
it("should create a VolatileMemory", () => {
const memory = new VolatileMemory();
assert.notEqual(memory, null);
});
it("should create a VolatileMemory with initial values", () => {
const memory = new VolatileMemory({
"test": 123
});
assert.notEqual(memory, null);
assert.equal(memory.has("test"), true);
});
});
const obj = { foo: 'bar' };
const memory = new VolatileMemory();
describe("set", () => {
it("should set a primitive value", () => {
memory.set("test", 123);
assert.equal(memory.has("test"), true);
});
it("should set an object", () => {
memory.set("test2", obj);
assert.equal(memory.has("test2"), true);
});
});
describe("get", () => {
it("should get a primitive value", () => {
const value = memory.get("test");
assert.equal(value, 123);
});
it("should get an object that's a clone", () => {
const value = memory.get("test2");
assert.deepEqual(value, { foo: 'bar' });
assert.notEqual(value, obj);
});
it("should return undefined when getting a value that doesn't exist", () => {
const value = memory.get("test3");
assert.equal(value, undefined);
});
});
describe("has", () => {
it("should return false when a value doesn't exist", () => {
assert.equal(memory.has("test3"), false);
});
it("should return true when a value exists", () => {
assert.equal(memory.has("test"), true);
});
});
describe("delete", () => {
it("should delete a value", () => {
memory.delete("test");
assert.equal(memory.has("test"), false);
assert.equal(memory.has("test2"), true);
});
});
describe("clear", () => {
it("should clear all values", () => {
memory.set("test", 123);
|
memory.clear();
|
assert.equal(memory.has("test"), false);
assert.equal(memory.has("test2"), false);
});
});
});
|
src/VolatileMemory.spec.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/VolatileMemory.ts",
"retrieved_chunk": " const clone = JSON.parse(JSON.stringify(value));\n this._memory.set(key, clone);\n } else {\n this._memory.set(key, value);\n }\n }\n public delete(key: string): void {\n this._memory.delete(key);\n }\n public clear(): void {",
"score": 0.8629522323608398
},
{
"filename": "src/VolatileMemory.ts",
"retrieved_chunk": " this._memory.clear();\n }\n}",
"score": 0.8422620296478271
},
{
"filename": "src/FunctionRegistry.spec.ts",
"retrieved_chunk": " it(\"should return true when a function exists\", () => {\n const registry = new FunctionRegistry({\n \"test\": async (memory, functions, tokenizer, args) => { }\n });\n assert.equal(registry.has(\"test\"), true);\n });\n });\n describe(\"invoke\", () => {\n const memory = new VolatileMemory();\n const tokenizer = new GPT3Tokenizer();",
"score": 0.8260490298271179
},
{
"filename": "src/FunctionRegistry.spec.ts",
"retrieved_chunk": " assert.equal(called, true);\n });\n it(\"should throw when invoking a function that doesn't exist\", () => {\n const registry = new FunctionRegistry();\n assert.throws(() => registry.invoke(\"test\", memory, registry, tokenizer, [\"Hello World\"]));\n });\n });\n});",
"score": 0.8216001391410828
},
{
"filename": "src/FunctionRegistry.spec.ts",
"retrieved_chunk": " it(\"should invoke a function\", async () => {\n let called = false;\n const registry = new FunctionRegistry({\n \"test\": async (memory, functions, tokenizer, args) => {\n assert.equal(args.length, 1);\n assert.equal(args[0], \"Hello World\");\n called = true;\n }\n });\n await registry.invoke(\"test\", memory, registry, tokenizer, [\"Hello World\"]);",
"score": 0.8134275674819946
}
] |
typescript
|
memory.clear();
|
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
|
if (isPset(lastInstruction.instruction)) {
|
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build a map from instruction address to the label name that will replace its immediate
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
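// Worked example with hypothetical values of the page fix-up above: a `jp` whose 8-bit
// immediate follows a `pset`, so the pset's lower 5 bits supply the upper page bits.
const examplePsetWord = 0x1c;                                  // pset word; lower 5 bits select the page
const exampleJpWord = 0x0034;                                  // jp word; lower 8 bits are the local target
const exampleLowerByte = exampleJpWord & maskOfSize(8);        // 0x34
const examplePage = examplePsetWord & 0x1f;                    // 0x1c
const exampleGlobalPc = (examplePage << 8) | exampleLowerByte; // 0x1c34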
const findWordInstruction = (word: number, instructions: Instruction[]) => {
  // Naive linear scan; performance doesn't really matter here
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
|
src/lib/disassembly.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 0.8386669754981995
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 0.8293737173080444
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " break;\n }\n }\n const low = opcode & 0xf;\n const mid = (opcode & 0xf0) >> 4;\n const high = (opcode & 0xf00) >> 8;\n const baseAddress = instruction.address * 3;\n // We use reverse order because that's how the nibbles are in the ROM\n threeNibbleBuffer[baseAddress] = high;\n threeNibbleBuffer[baseAddress + 1] = mid;",
"score": 0.8212839961051941
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " let opcode = 0;\n switch (instruction.type) {\n case \"literal\": {\n opcode = buildOpcode(instruction.opcodeString, 0, 0);\n break;\n }\n case \"immediate\": {\n opcode = buildOpcode(\n instruction.opcodeString,\n instruction.bitCount,",
"score": 0.8191369771957397
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.815758228302002
}
] |
typescript
|
if (isPset(lastInstruction.instruction)) {
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
|
const matches = labelRegex.exec(line);
|
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8739342093467712
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8551946878433228
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 0.8539324998855591
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " type: \"immediate\",\n regex: cleanAndFinishInstructionRegex(instructionLine),\n immediate: {\n bitCount,\n stringIndex: index,\n stringLength: matchString.length,\n },\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8330153226852417
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " if (lineLabel) {\n output += `\\n${lineLabel.name}:\\n`;\n }\n output += ` ${buildDisassembledInstructionString(\n instruction,\n immediateLabel\n )}\\n`;\n address += 1;\n }\n return output;",
"score": 0.8320857286453247
}
] |
typescript
|
const matches = labelRegex.exec(line);
|
import { strict as assert } from "assert";
import { FunctionRegistry } from "./FunctionRegistry";
import { VolatileMemory } from "./VolatileMemory";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
describe("FunctionRegistry", () => {
describe("constructor", () => {
it("should create a FunctionRegistry", () => {
const registry = new FunctionRegistry();
assert.notEqual(registry, null);
assert.equal(registry.has("test"), false);
});
it("should create a FunctionRegistry with initial functions", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.notEqual(registry, null);
assert.equal(registry.has("test"), true);
});
});
describe("addFunction", () => {
it("should add a function", () => {
const registry = new FunctionRegistry();
registry.addFunction("test", async (memory, functions, tokenizer, args) => { });
assert.equal(registry.has("test"), true);
});
it("should throw when adding a function that already exists", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.throws(() => registry.addFunction("test", async (memory, functions, tokenizer, args) => { }));
});
});
describe("get", () => {
it("should get a function", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
|
const fn = registry.get("test");
|
assert.notEqual(fn, null);
});
it("should throw when getting a function that doesn't exist", () => {
const registry = new FunctionRegistry();
assert.throws(() => registry.get("test"));
});
});
describe("has", () => {
it("should return false when a function doesn't exist", () => {
const registry = new FunctionRegistry();
assert.equal(registry.has("test"), false);
});
it("should return true when a function exists", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.equal(registry.has("test"), true);
});
});
describe("invoke", () => {
const memory = new VolatileMemory();
const tokenizer = new GPT3Tokenizer();
it("should invoke a function", async () => {
let called = false;
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => {
assert.equal(args.length, 1);
assert.equal(args[0], "Hello World");
called = true;
}
});
await registry.invoke("test", memory, registry, tokenizer, ["Hello World"]);
assert.equal(called, true);
});
it("should throw when invoking a function that doesn't exist", () => {
const registry = new FunctionRegistry();
assert.throws(() => registry.invoke("test", memory, registry, tokenizer, ["Hello World"]));
});
});
});
|
src/FunctionRegistry.spec.ts
|
Stevenic-promptrix-4a210d8
|
[
{
"filename": "src/VolatileMemory.spec.ts",
"retrieved_chunk": " const value = memory.get(\"test\");\n assert.equal(value, 123);\n });\n it(\"should get an object that's a clone\", () => {\n const value = memory.get(\"test2\");\n assert.deepEqual(value, { foo: 'bar' });\n assert.notEqual(value, obj);\n });\n it(\"should return undefined when getting a value that doesn't exist\", () => {\n const value = memory.get(\"test3\");",
"score": 0.8486277461051941
},
{
"filename": "src/VolatileMemory.spec.ts",
"retrieved_chunk": " memory.set(\"test\", 123);\n assert.equal(memory.has(\"test\"), true);\n });\n it(\"should set an object\", () => {\n memory.set(\"test2\", obj);\n assert.equal(memory.has(\"test2\"), true);\n });\n });\n describe(\"get\", () => {\n it(\"should get a primitive value\", () => {",
"score": 0.8425010442733765
},
{
"filename": "src/VolatileMemory.spec.ts",
"retrieved_chunk": " assert.equal(value, undefined);\n });\n });\n describe(\"has\", () => {\n it(\"should return false when a value doesn't exist\", () => {\n assert.equal(memory.has(\"test3\"), false);\n });\n it(\"should return true when a value exists\", () => {\n assert.equal(memory.has(\"test\"), true);\n });",
"score": 0.8239635229110718
},
{
"filename": "src/ConversationHistory.spec.ts",
"retrieved_chunk": " ],\n \"longHistory\": [\n { role: \"user\", content: \"Hello\" },\n { role: \"assistant\", content: \"Hi! How can I help you?\" },\n { role: \"user\", content: \"I'd like to book a flight\" },\n { role: \"assistant\", content: \"Sure, where would you like to go?\" },\n ]\n });\n const functions = new FunctionRegistry();\n const tokenizer = new GPT3Tokenizer();",
"score": 0.8236526250839233
},
{
"filename": "src/VolatileMemory.spec.ts",
"retrieved_chunk": " });\n describe(\"delete\", () => {\n it(\"should delete a value\", () => {\n memory.delete(\"test\");\n assert.equal(memory.has(\"test\"), false);\n assert.equal(memory.has(\"test2\"), true);\n });\n });\n describe(\"clear\", () => {\n it(\"should clear all values\", () => {",
"score": 0.8174883127212524
}
] |
typescript
|
const fn = registry.get("test");
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program
|
.matchedLabels[label.label];
|
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.869271457195282
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8508179187774658
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " for (const instruction of namedLabel.instructions) {\n labelUsageMap[instruction.address] = namedLabel.name;\n }\n }\n }\n let output = \"\";\n let address = 0;\n for (const instruction of disassembledInstructions) {\n const immediateLabel = labelUsageMap[instruction.address];\n const lineLabel = namedLabels[instruction.address];",
"score": 0.8387681841850281
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 0.8233546614646912
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " if (lineLabel) {\n output += `\\n${lineLabel.name}:\\n`;\n }\n output += ` ${buildDisassembledInstructionString(\n instruction,\n immediateLabel\n )}\\n`;\n address += 1;\n }\n return output;",
"score": 0.8189974427223206
}
] |
typescript
|
.matchedLabels[label.label];
|
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build a map from instruction address to the label name that will replace its immediate
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
  // Naive linear scan; performance doesn't really matter here
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
|
if (instruction.sortableOpcode <= word) {
|
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
|
src/lib/disassembly.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " const matchString = numberMatch[0];\n // This is guaranteed to exist due to the regex\n const bitCount = parseNumber(numberMatch[1]!);\n const index = numberMatch.index;\n const instructionLine =\n originalInstruction.substring(0, index) +\n \"(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))\" +\n originalInstruction.substring(index + matchString.length);\n const sortableOpcode = buildSortableOpcode(opcodeString, bitCount);\n config.instructions.push({",
"score": 0.8421190977096558
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n } else {\n // This is a literal\n const sortableOpcode = buildSortableOpcode(opcodeString, 0);\n config.instructions.push({\n type: \"literal\",\n regex: cleanAndFinishInstructionRegex(originalInstruction),\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8356550931930542
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8342666625976562
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " program.matchedInstructions.push({\n type: \"label\",\n line,\n label: matches[2],\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,\n });\n } else {",
"score": 0.8281682729721069
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " let opcode = 0;\n switch (instruction.type) {\n case \"literal\": {\n opcode = buildOpcode(instruction.opcodeString, 0, 0);\n break;\n }\n case \"immediate\": {\n opcode = buildOpcode(\n instruction.opcodeString,\n instruction.bitCount,",
"score": 0.8250205516815186
}
] |
typescript
|
if (instruction.sortableOpcode <= word) {
|
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive
|
= lastInstruction.actualWord & 0x1f;
|
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build list of instructions that will have their immediates replaced with these labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
|
src/lib/disassembly.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8166608810424805
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 0.8160486221313477
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " let opcode = 0;\n switch (instruction.type) {\n case \"literal\": {\n opcode = buildOpcode(instruction.opcodeString, 0, 0);\n break;\n }\n case \"immediate\": {\n opcode = buildOpcode(\n instruction.opcodeString,\n instruction.bitCount,",
"score": 0.810991108417511
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " break;\n }\n }\n const low = opcode & 0xf;\n const mid = (opcode & 0xf0) >> 4;\n const high = (opcode & 0xf00) >> 8;\n const baseAddress = instruction.address * 3;\n // We use reverse order because that's how the nibbles are in the ROM\n threeNibbleBuffer[baseAddress] = high;\n threeNibbleBuffer[baseAddress + 1] = mid;",
"score": 0.8086148500442505
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 0.808557391166687
}
] |
typescript
|
= lastInstruction.actualWord & 0x1f;
|
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build list of instructions that will have their immediates replaced with these labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += `
|
${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
|
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
|
src/lib/disassembly.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 0.8639956712722778
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8638830184936523
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8440353274345398
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " program.matchedInstructions.push({\n type: \"label\",\n line,\n label: matches[2],\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,\n });\n } else {",
"score": 0.8434696197509766
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " lineNumber,\n address,\n });\n } else if (label !== undefined) {\n program.matchedInstructions.push({\n type: \"constant\",\n subtype: \"label\",\n label,\n line,\n lineNumber,",
"score": 0.8419370651245117
}
] |
typescript
|
${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
|
import { log } from "./log";
import { AssembledProgram, Option } from "./types";
import { maskOfSize } from "./util";
/**
* Builds the output buffer from the matched instructions
* @param program The configured program we have built
* @param word16Align If true, align the 12 bit opcodes to 16 bit words. The lowest nibble will be 0
* @returns The output buffer that should be written to the assembled binary
*/
export const outputInstructions = (
program: AssembledProgram,
word16Align: boolean
): Option<Buffer> => {
// This buffer stores each nibble of the program separately, and we will combine this later into the output buffer
const threeNibbleBuffer: number[] = new Array(8192 * 3);
// Fill array with 0xF
for (let i = 0; i < threeNibbleBuffer.length; i++) {
threeNibbleBuffer[i] = 0xf;
}
for (const instruction of program.matchedInstructions) {
let opcode = 0;
switch (instruction.type) {
case "literal": {
opcode = buildOpcode(instruction.opcodeString, 0, 0);
break;
}
case "immediate": {
opcode = buildOpcode(
instruction.opcodeString,
instruction.bitCount,
instruction.immediate
);
break;
}
case "label": {
const label = program.matchedLabels[instruction.label];
if (!label) {
log(`Unknown label ${instruction.label}`, instruction.lineNumber);
return { type: "none" };
}
opcode = buildOpcode(
instruction.opcodeString,
instruction.bitCount,
label.address
);
break;
}
case "constant": {
if (instruction.subtype === "literal") {
opcode = instruction.value;
} else {
// Label
const label = program.matchedLabels[instruction.label];
if (!label) {
log(`Unknown label ${instruction.label}`, instruction.lineNumber);
return { type: "none" };
}
console.log(`${label.address.toString(16)}`);
opcode = label.address;
}
break;
}
}
const low = opcode & 0xf;
const mid = (opcode & 0xf0) >> 4;
const high = (opcode & 0xf00) >> 8;
const baseAddress = instruction.address * 3;
// We use reverse order because that's how the nibbles are in the ROM
threeNibbleBuffer[baseAddress] = high;
threeNibbleBuffer[baseAddress + 1] = mid;
threeNibbleBuffer[baseAddress + 2] = low;
}
return {
type: "some",
value: copyToOutputBuffer(threeNibbleBuffer, word16Align),
};
};
const copyToOutputBuffer = (
threeNibbleBuffer: number[],
word16Align: boolean
): Buffer => {
const bufferSize = word16Align ? 8192 * 2 : (8192 * 3) / 2;
const buffer = Buffer.alloc(bufferSize);
let byteBuffer = 0;
let bufferAddress = 0;
let lowNibble = false;
let evenByte = true;
for (let i = 0; i < threeNibbleBuffer.length; i++) {
const nibble = threeNibbleBuffer[i]!;
const writeSpacerValue = word16Align && !lowNibble && evenByte;
if (lowNibble || writeSpacerValue) {
// "Second", lower value of byte, or we're writing the spacer now
byteBuffer |= nibble;
buffer[bufferAddress] = byteBuffer;
bufferAddress += 1;
byteBuffer = 0;
evenByte = !evenByte;
} else {
// "First", upper value of byte
byteBuffer |= nibble << 4;
}
if (!writeSpacerValue) {
// We've moved to the next byte if we wrote a spacer, so stay at !lowNibble
lowNibble = !lowNibble;
}
}
return buffer;
};
/**
 * Consumes the opcode template from the BASS arch file and produces the actual output word
* @param template The opcode template from the BASS arch file
* @param argSize The number of bits in an argument to the opcode, if any
* @param argument The actual data to pass as an argument to the opcode, if any
* @returns The output opcode as a 12 bit word
*/
export const buildOpcode = (
template: string,
argSize: number,
argument: number
) => {
let index = 0;
let outputWord = 0;
while (index < template.length) {
const char = template[index];
if (char === "%") {
// Consume chars until whitespace
let data = 0;
let count = 0;
for (let i = 1; i < Math.min(13, template.length - index); i++) {
const nextChar = template[index + i]!;
if (nextChar !== "1" && nextChar !== "0") {
// Stop consuming
break;
}
data <<= 1;
data |= nextChar === "1" ? 1 : 0;
count += 1;
}
      // Shift the consumed bits into the output word
outputWord <<= count;
outputWord |= data;
index += count + 1;
} else if (char === "=") {
if (template[index + 1] !== "a") {
console.log(
`ERROR: Unexpected char after = in instruction definition "${template}"`
);
return 0;
}
outputWord <<= argSize;
|
outputWord |= maskOfSize(argSize) & argument;
|
index += 2;
} else {
index += 1;
}
}
return outputWord;
};
|
src/lib/opcodeOutput.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 0.8462275266647339
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " const matchString = numberMatch[0];\n // This is guaranteed to exist due to the regex\n const bitCount = parseNumber(numberMatch[1]!);\n const index = numberMatch.index;\n const instructionLine =\n originalInstruction.substring(0, index) +\n \"(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))\" +\n originalInstruction.substring(index + matchString.length);\n const sortableOpcode = buildSortableOpcode(opcodeString, bitCount);\n config.instructions.push({",
"score": 0.807207465171814
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " return;\n }\n program.matchedInstructions.push({\n type: \"immediate\",\n line,\n immediate: parseNumber(matches[1]),\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,",
"score": 0.8054581880569458
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n } else {\n // This is a literal\n const sortableOpcode = buildSortableOpcode(opcodeString, 0);\n config.instructions.push({\n type: \"literal\",\n regex: cleanAndFinishInstructionRegex(originalInstruction),\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8016042113304138
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " type: \"immediate\",\n regex: cleanAndFinishInstructionRegex(instructionLine),\n immediate: {\n bitCount,\n stringIndex: index,\n stringLength: matchString.length,\n },\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 0.8007712364196777
}
] |
typescript
|
outputWord |= maskOfSize(argSize) & argument;
|
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build list of instructions that will have their immediates replaced with these labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
|
labelUsageMap[instruction.address] = namedLabel.name;
|
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
|
src/lib/disassembly.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 0.8446858525276184
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 0.8352570533752441
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " lineNumber,\n address,\n });\n } else if (label !== undefined) {\n program.matchedInstructions.push({\n type: \"constant\",\n subtype: \"label\",\n label,\n line,\n lineNumber,",
"score": 0.8305351734161377
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 0.8280002474784851
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " });\n } else if (matches[2] !== undefined) {\n // potential label\n if (instruction.type !== \"immediate\") {\n log(\n \"Attempted to match content with non-immediate instruction\",\n lineNumber\n );\n return;\n }",
"score": 0.8230992555618286
}
] |
typescript
|
labelUsageMap[instruction.address] = namedLabel.name;
|
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await
|
readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
|
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
|
src/assembler.ts
|
agg23-tamagotchi-disassembled-421eacb
|
[
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 0.9076731204986572
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "const archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst build = async () => {\n const instructionSet = await readArch(archPath);\n const sortedInstructions = instructionSet.instructions.sort(\n (a, b) => a.sortableOpcode - b.sortableOpcode\n );\n const buffer = readFileSync(inputFile);\n const outputString = parseBinaryBuffer(buffer, sortedInstructions);",
"score": 0.8760507106781006
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": "};\n/**\n * Reads and parses the BASS arch file\n * @param path The path of the arch file\n * @returns The InstructionSet resulting from parsing the arch file\n */\nexport const readArch = async (path: string): Promise<InstructionSet> => {\n const instructionSet: InstructionSet = {\n instructions: [],\n };",
"score": 0.8490470051765442
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "import { readFileSync, writeFileSync } from \"fs\";\nimport path from \"path\";\nimport { argv } from \"process\";\nimport { parseBinaryBuffer } from \"./lib/disassembly\";\nimport { readArch } from \"./lib/fs\";\nif (argv.length != 4) {\n console.log(`Received ${argv.length - 2} arguments. Expected 2\\n`);\n console.log(\"Usage: node disassembler.js [input.bin] [output.asm]\");\n process.exit(1);\n}",
"score": 0.8356885313987732
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": "import fs from \"fs\";\nimport readline from \"readline\";\nimport events from \"events\";\nimport { InstructionSet, parseArchLine } from \"./bass\";\n/**\n * Opens a file and streams it out, line by line\n * @param path The path of the file to read\n * @param onLine A callback used to respond to each line content and its line number\n */\nexport const readByLines = async (",
"score": 0.8290220499038696
}
] |
typescript
|
readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
|