-
Notifications
You must be signed in to change notification settings - Fork 74
Add granular connector caching #1552
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Closed
Closed
Changes from 7 commits
Commits
Show all changes
10 commits
Select commit
Hold shift + click to select a range
5fadcdb
Update connector and inbound backup cache
IsuruMaduranga 93d395a
Improve connector fetching
IsuruMaduranga 11fe71e
Enhance connector operation handling
IsuruMaduranga 8cfaeef
Refactor connector tool action handling to dynamically fetch connecto…
IsuruMaduranga 732a57a
Stabilize proper connector initialization
IsuruMaduranga 45914a1
Update session storage version to 1.0, implement fresh startup sessio…
IsuruMaduranga 8c96551
Add script to update connector context database with fetching and pro…
IsuruMaduranga 97c1fa6
Fix PR comments by github copilot
IsuruMaduranga ba86b8b
Enhance connector cache management
IsuruMaduranga 0b0b0cb
Improve error handling in JSON parsing and enhance connector store ca…
IsuruMaduranga File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
372 changes: 372 additions & 0 deletions
372
workspaces/mi/mi-extension/scripts/update-connector-context-db.js
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,372 @@ | ||
#!/usr/bin/env node

/* eslint-disable no-console */

// Updates the bundled connector/inbound context databases: fetches
// summaries and details from the WSO2 connector-store API and rewrites
// the generated TypeScript files under src/ai-features/agent-mode/context.

const fs = require('fs/promises');
const path = require('path');

// Connector-store endpoints; the base URL can be overridden via env for testing.
const API_BASE = process.env.CONNECTOR_STORE_BASE_URL
    || 'https://apis.wso2.com/qgpf/connector-store-backend/endpoint-9090-803/v1.0';
// Contains a literal '${type}' placeholder (escaped) that fetchSummaries() substitutes.
const SUMMARY_URL_TEMPLATE = `${API_BASE}/connectors/summaries?type=\${type}&limit=100&offset=0&product=MI`;
const DETAILS_URL = `${API_BASE}/connectors/details/filter`;
const PRODUCT = 'MI';
const RUNTIME_VERSION = process.env.MI_RUNTIME_VERSION || '4.5.0';
// Kept small on purpose; main() rejects values greater than 3.
const MAX_NAMES_PER_REQUEST = 3;
const REQUEST_TIMEOUT_MS = 120000;
const RETRY_COUNT = 3;
const RETRY_DELAY_MS = 1250;
const BATCH_DELAY_MS = 250; // pause between detail batches (used in fetchAllDetails)

// Generated files this script rewrites, one target per connector type.
const CONTEXT_DIR = path.resolve(__dirname, '../src/ai-features/agent-mode/context');
const TARGETS = [
    { type: 'Connector', fileName: 'connector_db.ts', exportName: 'CONNECTOR_DB' },
    { type: 'Inbound', fileName: 'inbound_db.ts', exportName: 'INBOUND_DB' },
];
|
|
||
// Pause execution for the given number of milliseconds.
function sleep(ms) {
    return new Promise((done) => {
        setTimeout(done, ms);
    });
}
|
|
||
// Split `items` into consecutive slices of at most `size` elements.
function chunkArray(items, size) {
    const result = [];
    let start = 0;
    while (start < items.length) {
        result.push(items.slice(start, start + size));
        start += size;
    }
    return result;
}
|
|
||
// Extract a trimmed connector name from an API record, tolerating the
// different property spellings used across endpoints
// (connectorName / connector_name / name). Returns '' when no usable
// string name is present.
function getConnectorName(item) {
    if (item === null || item === undefined || typeof item !== 'object') {
        return '';
    }
    const candidate = item.connectorName || item.connector_name || item.name;
    return typeof candidate === 'string' ? candidate.trim() : '';
}
|
|
||
// Coerce an API payload into an array, unwrapping the common envelope
// shapes ({ data }, { items }, { connectors }) in that order of
// preference. Throws when no array can be found.
function normalizeArrayPayload(payload, label) {
    if (Array.isArray(payload)) {
        return payload;
    }

    if (payload && typeof payload === 'object') {
        for (const key of ['data', 'items', 'connectors']) {
            if (Array.isArray(payload[key])) {
                return payload[key];
            }
        }
    }

    throw new Error(`${label} payload is not an array.`);
}
|
|
||
// fetch() wrapper that aborts the request once REQUEST_TIMEOUT_MS
// elapses; the timer is always cleared, even on failure.
async function fetchWithTimeout(url, init) {
    const aborter = new AbortController();
    const timer = setTimeout(() => aborter.abort(), REQUEST_TIMEOUT_MS);
    try {
        const response = await fetch(url, { ...init, signal: aborter.signal });
        return response;
    } finally {
        clearTimeout(timer);
    }
}
|
|
||
// Read and decode an HTTP response body.
//
// Throws a descriptive Error (with a short body snippet) for non-2xx
// statuses, returns [] for an empty body, and otherwise parses the body
// as JSON. A JSON parse failure is rethrown with the original error
// attached as `cause` and a snippet of the offending body, instead of
// being silently discarded.
async function parseResponse(response, label) {
    const text = await response.text();

    if (!response.ok) {
        const bodySnippet = text ? ` - ${text.slice(0, 300)}` : '';
        throw new Error(`${label} failed: HTTP ${response.status} ${response.statusText}${bodySnippet}`);
    }

    if (text.trim().length === 0) {
        return [];
    }

    try {
        return JSON.parse(text);
    } catch (err) {
        // Preserve the parse error and show what the server actually sent.
        throw new Error(`${label} returned non-JSON content: ${text.slice(0, 120)}`, { cause: err });
    }
}
|
|
||
// Perform an HTTP request with up to RETRY_COUNT attempts and a
// linearly growing backoff between them. Returns the parsed payload;
// rethrows the last error when every attempt fails.
async function requestJson(url, init, label) {
    let lastError;

    for (let attempt = 1; attempt <= RETRY_COUNT; attempt++) {
        try {
            return await parseResponse(await fetchWithTimeout(url, init), label);
        } catch (error) {
            lastError = error;
            const moreAttemptsLeft = attempt < RETRY_COUNT;
            if (moreAttemptsLeft) {
                console.warn(`${label} attempt ${attempt}/${RETRY_COUNT} failed. Retrying...`);
                await sleep(RETRY_DELAY_MS * attempt);
            }
        }
    }

    throw lastError;
}
|
|
||
// Fetch and normalize the summary list for a connector type from the
// store API (the '${type}' placeholder in the template is filled here).
async function fetchSummaries(type) {
    const url = SUMMARY_URL_TEMPLATE.replace('${type}', encodeURIComponent(type));
    const init = {
        method: 'GET',
        headers: {
            Accept: 'application/json',
        },
    };
    const payload = await requestJson(url, init, `${type} summaries`);
    return normalizeArrayPayload(payload, `${type} summaries`);
}
|
|
||
// Collect the distinct, non-empty connector names from a summary list,
// preserving first-seen order. Throws when nothing usable is found.
function extractUniqueNames(summaries, type) {
    const seen = new Set();
    const names = [];

    summaries.forEach((summary) => {
        const name = getConnectorName(summary);
        if (name !== '' && !seen.has(name)) {
            seen.add(name);
            names.push(name);
        }
    });

    if (names.length === 0) {
        throw new Error(`No ${type} names found from summaries.`);
    }

    return names;
}
|
|
||
// Return the record's description when it is a string, '' otherwise.
function getConnectorDescription(item) {
    if (item && typeof item === 'object' && typeof item.description === 'string') {
        return item.description;
    }
    return '';
}
|
|
||
// Resolve the record's connector type (connectorType / connector_type),
// falling back to `fallbackType` when no non-empty type string exists.
function getConnectorTypeValue(item, fallbackType) {
    if (item && typeof item === 'object') {
        const candidate = item.connectorType || item.connector_type;
        if (typeof candidate === 'string') {
            const trimmed = candidate.trim();
            if (trimmed.length > 0) {
                return trimmed;
            }
        }
    }
    return fallbackType;
}
|
|
||
// Build a placeholder detail record from summary data alone, used when
// the details endpoint never returned anything for `name`. Inbound
// records additionally carry an empty `id` field.
function createSummaryFallbackRecord(name, summary, type) {
    const record = {
        connectorName: name,
        repoName: '',
        description: getConnectorDescription(summary),
        connectorType: getConnectorTypeValue(summary, type),
        mavenGroupId: '',
        mavenArtifactId: '',
        version: {
            tagName: '',
            releaseId: '',
            isLatest: true,
            isDeprecated: false,
            operations: [],
            connections: [],
        },
        otherVersions: {},
        connectorRank: 0,
        iconUrl: '',
    };

    return type === 'Inbound' ? { ...record, id: '' } : record;
}
|
|
||
// POST one batch of connector names to the details/filter endpoint and
// return the normalized array of detail records.
async function fetchDetailsBatch(type, connectorNames) {
    const label = `${type} details (${connectorNames.join(', ')})`;
    const body = JSON.stringify({
        connectorNames,
        runtimeVersion: RUNTIME_VERSION,
        product: PRODUCT,
        latest: true,
    });
    const payload = await requestJson(
        DETAILS_URL,
        {
            method: 'POST',
            headers: {
                Accept: 'application/json',
                'Content-Type': 'application/json',
            },
            body,
        },
        label
    );
    return normalizeArrayPayload(payload, `${type} details`);
}
|
|
||
// Fetch detailed records for every name, batching requests and making
// up to three passes over names whose details are still missing.
// Returns the collected details keyed by name, plus the names that
// never resolved.
async function fetchAllDetails(type, names) {
    const passLimit = 3;
    const detailsByName = new Map();
    let missing = [...names];

    for (let pass = 1; pass <= passLimit && missing.length > 0; pass++) {
        if (pass > 1) {
            console.warn(`[${type}] retry pass ${pass} for ${missing.length} missing item(s).`);
        }

        const batches = chunkArray(missing, MAX_NAMES_PER_REQUEST);
        let batchIndex = 0;
        for (const batch of batches) {
            batchIndex++;
            console.log(`[${type}] details batch ${batchIndex}/${batches.length} with ${batch.length} item(s).`);

            try {
                const batchDetails = await fetchDetailsBatch(type, batch);
                for (const detail of batchDetails) {
                    const name = getConnectorName(detail);
                    if (name) {
                        detailsByName.set(name, detail);
                    }
                }
            } catch (error) {
                // A failed batch is retried on the next pass rather than aborting the run.
                const message = error instanceof Error ? error.message : String(error);
                console.warn(`[${type}] batch failed and will be retried in next pass: ${message}`);
            }

            await sleep(BATCH_DELAY_MS);
        }

        missing = names.filter((name) => !detailsByName.has(name));
    }

    if (missing.length > 0) {
        console.warn(`[${type}] missing API details for ${missing.length} item(s): ${missing.join(', ')}`);
    }

    return { detailsByName, missing };
}
|
|
||
// Parse the JSON array previously written for `exportName` in the given
// TypeScript file and index its records by connector name.
//
// The array's end is located with a string-aware bracket-depth scan
// starting at the export declaration, so content after the array — a
// trailing semicolon, further exports, or any later ']' — no longer
// breaks parsing (the previous implementation grabbed the file's LAST
// ']' via lastIndexOf).
async function readExistingRecordsByName(filePath, exportName) {
    const existing = await fs.readFile(filePath, 'utf8');
    const exportIndex = existing.indexOf(`export const ${exportName} =`);
    if (exportIndex < 0) {
        throw new Error(`Could not find export declaration for ${exportName} in ${filePath}.`);
    }

    const arrayStart = existing.indexOf('[', exportIndex);
    const arrayEnd = findMatchingBracket(existing, arrayStart);
    if (arrayStart < 0 || arrayEnd < 0) {
        throw new Error(`Could not parse array contents from ${filePath}.`);
    }

    const parsed = JSON.parse(existing.slice(arrayStart, arrayEnd + 1));
    if (!Array.isArray(parsed)) {
        throw new Error(`Parsed existing data from ${filePath} is not an array.`);
    }

    const recordsByName = new Map();
    for (const record of parsed) {
        const name = getConnectorName(record);
        if (name) {
            recordsByName.set(name, record);
        }
    }

    return recordsByName;
}

// Return the index of the ']' matching the '[' at `start`, skipping
// brackets inside double-quoted JSON strings (with backslash escapes).
// Returns -1 when `start` is not a '[' or the bracket is unmatched.
function findMatchingBracket(text, start) {
    if (start < 0 || text[start] !== '[') {
        return -1;
    }
    let depth = 0;
    let inString = false;
    for (let i = start; i < text.length; i++) {
        const ch = text[i];
        if (inString) {
            if (ch === '\\') {
                i++; // skip the escaped character
            } else if (ch === '"') {
                inString = false;
            }
        } else if (ch === '"') {
            inString = true;
        } else if (ch === '[') {
            depth++;
        } else if (ch === ']') {
            depth--;
            if (depth === 0) {
                return i;
            }
        }
    }
    return -1;
}
coderabbitai[bot] marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
// Rewrite `filePath`: keep everything up to and including the
// `export const <exportName> =` declaration verbatim, then assign the
// given records serialized as pretty-printed (4-space) JSON.
async function writeTsArrayFile(filePath, exportName, records) {
    const existing = await fs.readFile(filePath, 'utf8');
    const declaration = new RegExp(`^[\\s\\S]*?export const\\s+${exportName}\\s*=\\s*`);
    const matched = declaration.exec(existing);

    if (matched === null) {
        throw new Error(`Could not find export declaration for ${exportName} in ${filePath}.`);
    }

    const serialized = JSON.stringify(records, null, 4);
    await fs.writeFile(filePath, `${matched[0]}${serialized}\n`, 'utf8');
}
|
|
||
// Refresh one context database file: pull summaries, resolve details,
// backfill any gaps from the existing file (or summary-only
// placeholders), then rewrite the exported TypeScript array.
async function updateTarget(target) {
    const { type, fileName, exportName } = target;
    const filePath = path.join(CONTEXT_DIR, fileName);

    console.log(`\n=== Updating ${type} definitions ===`);
    const summaries = await fetchSummaries(type);
    const names = extractUniqueNames(summaries, type);
    const summariesByName = new Map();
    for (const summary of summaries) {
        summariesByName.set(getConnectorName(summary), summary);
    }
    console.log(`[${type}] fetched ${summaries.length} summaries, ${names.length} unique names.`);

    const { detailsByName, missing } = await fetchAllDetails(type, names);

    if (missing.length > 0) {
        const existingRecordsByName = await readExistingRecordsByName(filePath, exportName);
        let fallbackCount = 0;
        let summaryFallbackCount = 0;

        for (const name of missing) {
            const previousRecord = existingRecordsByName.get(name);
            if (previousRecord) {
                // Prefer the record kept from the last successful run.
                detailsByName.set(name, previousRecord);
                fallbackCount++;
            } else {
                // No prior data at all: synthesize a placeholder from the summary.
                detailsByName.set(name, createSummaryFallbackRecord(name, summariesByName.get(name), type));
                summaryFallbackCount++;
            }
        }

        console.warn(
            `[${type}] used fallback records for ${fallbackCount} item(s) and summary-only placeholders for ${summaryFallbackCount} item(s).`
        );
    }

    const details = names.map((name) => detailsByName.get(name)).filter(Boolean);
    console.log(`[${type}] fetched ${details.length} detailed records.`);

    await writeTsArrayFile(filePath, exportName, details);
    console.log(`[${type}] wrote ${filePath}`);
}
|
|
||
// Entry point: validate configuration, then update each target file.
// Targets are processed one at a time on purpose — the batch-size guard
// below exists to avoid overloading the backend.
async function main() {
    if (MAX_NAMES_PER_REQUEST > 3) {
        throw new Error('MAX_NAMES_PER_REQUEST must be 3 or less to avoid backend overload.');
    }

    for (const target of TARGETS) {
        await updateTarget(target);
    }
}
|
|
||
// Run only when executed directly, not when required as a module.
if (require.main === module) {
    main().catch((error) => {
        const reason = error instanceof Error ? error.message : String(error);
        console.error(`Failed to update connector context DB files: ${reason}`);
        process.exit(1);
    });
}
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.