diff --git a/.DS_Store b/.DS_Store
index 3b298125..1e7a2d81 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/.gitignore b/.gitignore
index 5f7c2cf1..33a0a665 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,60 @@
/embedding_qs_series_2/node_modules
/embedding_jwt/jwt/node_modules
embedding_qs_series_2/.env
+
+# Node.js dependencies - prevent large file issues
+node_modules/
+**/node_modules/
+
+# Next.js build artifacts and binaries - prevent 123MB file issues
+.next/
+.next/**
+out/
+build/
+dist/
+
+# Large Next.js native binaries that caused previous PR failures
+node_modules/@next/swc-darwin-arm64/next-swc.darwin-arm64.node
+node_modules/@next/swc-darwin-x64/next-swc.darwin-x64.node
+node_modules/@next/swc-linux-x64-gnu/next-swc.linux-x64-gnu.node
+node_modules/@next/swc-linux-x64-musl/next-swc.linux-x64-musl.node
+node_modules/@next/swc-win32-x64-msvc/next-swc.win32-x64-msvc.node
+
+# Environment files
+.env
+.env.local
+.env.production
+.env.development
+**/.env
+
+# IDE and editor files
+.vscode/
+.idea/
+*.swp
+*.swo
+
+# Logs
+logs/
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# Runtime data
+pids/
+*.pid
+*.seed
+*.pid.lock
+
+# Coverage directory used by tools like istanbul
+coverage/
+*.lcov
+
+# OS generated files
+.DS_Store
+.DS_Store?
+._*
+.Spotlight-V100
+.Trashes
+ehthumbs.db
+Thumbs.db
diff --git a/recipe-portal/.eslintrc.json b/recipe-portal/.eslintrc.json
new file mode 100644
index 00000000..0e81f9b9
--- /dev/null
+++ b/recipe-portal/.eslintrc.json
@@ -0,0 +1,3 @@
+{
+ "extends": "next/core-web-vitals"
+}
\ No newline at end of file
diff --git a/recipe-portal/.gitignore b/recipe-portal/.gitignore
new file mode 100644
index 00000000..46ad8fe7
--- /dev/null
+++ b/recipe-portal/.gitignore
@@ -0,0 +1,45 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# local env files
+.env*.local
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
+
+# Sigma API encrypted credentials (security)
+.sigma-portal/
+sigma-portal-keys.json
+
+# Environment files
+.env
+
+# Claude config
+/recipe-portal/.claude
\ No newline at end of file
diff --git a/recipe-portal/README.md b/recipe-portal/README.md
new file mode 100644
index 00000000..ce05f86e
--- /dev/null
+++ b/recipe-portal/README.md
@@ -0,0 +1,129 @@
+# QuickStarts API Toolkit
+Experiment with Sigma API calls and learn common request flows
+
+## Features
+
+### Recipes:
+- **Smart Parameter Detection**: Automatically detects and provides dropdown selection for Sigma resources (teams, members, workbooks, etc.)
+- **Interactive Execution**: Run recipes directly in the browser with real-time results
+- **Parameter Summary**: View which parameters were used in each request
+- **Code Viewing**: Browse the actual JavaScript code for each recipe
+
+### Quick API Explorer:
+- **Common Endpoints**: Curated list of the most useful Sigma API endpoints
+- **Zero Setup**: List endpoints require no parameters - perfect for quick exploration
+- **One Parameter**: Detail endpoints need just one ID to get specific resource information
+- **Alphabetical Organization**: Easy to find the endpoint you need
+
+## Authentication & Config Management
+
+### Smart Config System:
+- **Complete Configuration Storage**: Server endpoints + API credentials stored together as named "configs"
+- **Multi-Environment Support**: Easily switch between Production, Staging, Development environments
+- **One-Click Environment Switching**: Load complete configurations instantly
+- **Encrypted Local Storage**: AES-256 encryption for credential security
+
+### Config Management Features:
+- **Quick Start**: Load saved configs with one click - no manual entry needed
+- **Create New Configs**: Mix and match server endpoints with credentials
+- **Update Existing Configs**: Modify and save changes to existing configurations
+- **Delete Configs**: Remove configs you no longer need
+- **Auto-Save**: Configs saved automatically during authentication when enabled
+- **Manual Save**: Explicit save button for immediate config storage
+
+### Token Management:
+- **File-Based Storage**: Authentication tokens cached in system temp directory
+- **Persistent Sessions**: Tokens survive browser/server restarts for the full hour
+- **Automatic Expiration**: Tokens expire after 1 hour (Sigma's standard lifetime)
+- **Auto-Cleanup**: Expired tokens automatically detected and removed
+- **Manual Session End**: Clear authentication anytime with 🚪 End Session button
+
+### Storage Locations
+
+**Config Storage (encrypted)**:
+- **macOS**: `~/Library/Application Support/.sigma-portal/encrypted-keys.json`
+- **Windows**: `%APPDATA%\.sigma-portal\encrypted-keys.json`
+- **Linux**: `~/.config/.sigma-portal/encrypted-keys.json`
+
+**Token Cache (temporary)**:
+- **macOS**: `/var/folders/.../sigma-portal-token.json`
+- **Windows**: `%TEMP%\sigma-portal-token.json`
+- **Linux**: `/tmp/sigma-portal-token.json`
+
+### Developer Experience Benefits
+- **Environment Switching**: Instant switch between Production → Staging → Development
+- **Zero Re-entry**: Load complete configs without typing credentials repeatedly
+- **Secure Storage**: Military-grade AES-256 encryption for stored credentials
+- **Clean Separation**: Configs stored outside project directory (never committed to git)
+- **Visual Feedback**: Clear indicators show saved/unsaved state and notifications
+- **Flexible Workflow**: Session-only credentials OR persistent named configs
+
+### Config Workflow
+1. **First Time**: Enter server endpoint + credentials → Save as named config (e.g., "Production")
+2. **Daily Use**: Quick Start → Select "Production" → Instantly loaded and ready
+3. **Environment Switch**: Quick Start → Select "Staging" → Switched in one click
+4. **New Environment**: "✨ New Config" → Enter details → Save with new name
+
+## Getting Started
+Sigma_QuickStart_Public_Repo
+
+
+1. **Setup**: `npm install && npm run dev`
+2. **First-Time Config**: Open any recipe → **Config** tab → Enter server endpoint + credentials → Save as named config
+3. **Daily Use**: **Quick Start** section → Select your saved config → Ready to go!
+4. **Explore**: Use the ⚡ Quick API tab to explore common endpoints with smart parameters
+5. **Run Recipes**: Browse recipes by category and execute them with real-time results
+
+### Config Tab Features
+- **Quick Start**: Load saved configs instantly (appears when configs exist)
+- **Server Endpoint**: Choose your Sigma organization's server location
+- **API Credentials**: Enter Client ID and Client Secret
+- **Config Storage**: Save complete configurations with names like "Production", "Staging"
+- **Save Config**: Manual save button for immediate storage
+- **New Config**: Clear form to create fresh configurations
+- **Delete**: Remove configs you no longer need (🗑️ button when config selected)
+
+## Requirements
+- Node.js 18+
+- Sigma API credentials (Client ID and Secret)
+- Valid Sigma organization access
+
+## Development
+```bash
+npm install
+npm run dev
+```
+
+Navigate to `http://localhost:3001` to start exploring the Sigma API.
+
+## Project Structure
+```
+recipe-portal/
+├── app/                          # Next.js app directory
+│   ├── api/                      # API routes
+│   │   ├── execute/              # Recipe execution
+│   │   ├── resources/            # Resource fetching for dropdowns
+│   │   ├── keys/                 # Config management (CRUD operations)
+│   │   ├── token/                # Token management & caching
+│   │   └── call/                 # Quick API endpoint calls
+├── components/                   # React components
+│   ├── QuickApiExplorer.tsx      # Quick API exploration interface
+│   ├── QuickApiModal.tsx         # API endpoint execution modal
+│   ├── SmartParameterForm.tsx    # Smart parameter detection & forms
+│   ├── CodeViewer.tsx            # Recipe viewer with Config tab
+│   ├── AuthRecipeCard.tsx        # Authentication recipe card
+│   └── RecipeCard.tsx            # Standard recipe cards
+├── lib/                          # Utilities
+│   ├── smartParameters.ts        # Parameter detection logic
+│   ├── keyStorage.ts             # Encrypted config storage
+│   └── recipeScanner.ts          # Recipe discovery & analysis
+└── recipes/                      # Self-contained recipe files (copied from sigma-api-recipes)
+    ├── connections/              # Connection-related recipes
+    ├── members/                  # Member management recipes
+    ├── teams/                    # Team management recipes
+    ├── workbooks/                # Workbook operations
+    ├── embedding/                # Embedding examples
+    └── get-access-token.js       # Authentication helper
+```
+
+For setup instructions and API credential creation, visit the QuickStart: [Sigma REST API Recipes](https://quickstarts.sigmacomputing.com/guide/developers_api_code_samples/index.html?index=..%2F..index#0)
\ No newline at end of file
diff --git a/recipe-portal/app/api/call/route.ts b/recipe-portal/app/api/call/route.ts
new file mode 100644
index 00000000..2cf67e0a
--- /dev/null
+++ b/recipe-portal/app/api/call/route.ts
@@ -0,0 +1,168 @@
+import { NextResponse } from 'next/server';
+import axios from 'axios';
+import fs from 'fs';
+import path from 'path';
+import os from 'os';
+
+// Configuration-specific token caching (matches other working routes)
+function getTokenCacheFile(clientId: string) {
+ const configHash = clientId ? clientId.substring(0, 8) : 'default';
+ return path.join(os.tmpdir(), `sigma-portal-token-${configHash}.json`);
+}
+
+function getCachedToken(): { token: string; clientId: string } | null {
+ try {
+ // Look for the most recent valid token across all configurations
+ const tempDir = os.tmpdir();
+ const files = fs.readdirSync(tempDir);
+ const tokenFiles = files.filter(file => file.startsWith('sigma-portal-token-') && file.endsWith('.json'));
+
+ let mostRecentToken = null;
+ let mostRecentTime = 0;
+
+ for (const file of tokenFiles) {
+ try {
+ const filePath = path.join(tempDir, file);
+ const tokenData = JSON.parse(fs.readFileSync(filePath, 'utf8'));
+ const now = Date.now();
+
+ // Check if token is still valid (not expired)
+ if (tokenData.expiresAt && now < tokenData.expiresAt) {
+ const lastAccessTime = tokenData.lastAccessed || tokenData.createdAt;
+
+ if (lastAccessTime > mostRecentTime) {
+ mostRecentTime = lastAccessTime;
+ mostRecentToken = {
+ token: tokenData.token,
+ clientId: tokenData.clientId
+ };
+ }
+ } else {
+ // Token expired, remove file
+ fs.unlinkSync(filePath);
+ }
+ } catch (err) {
+ // Skip invalid token files
+ }
+ }
+
+ return mostRecentToken;
+ } catch (error) {
+ // Ignore errors, just return null
+ }
+ return null;
+}
+
+export async function POST(request: Request) {
+ try {
+ const { endpoint, method, parameters = {}, requestBody } = await request.json();
+
+ if (!endpoint) {
+ return NextResponse.json(
+ { error: 'Endpoint is required' },
+ { status: 400 }
+ );
+ }
+
+ // Get cached token
+ const tokenData = getCachedToken();
+ if (!tokenData) {
+ return NextResponse.json(
+ {
+ error: 'Authentication required',
+ message: 'No valid authentication token found. Please authenticate first.'
+ },
+ { status: 401 }
+ );
+ }
+
+ // Build the full URL
+ const baseURL = process.env.SIGMA_BASE_URL || 'https://aws-api.sigmacomputing.com/v2';
+ let url = `${baseURL}${endpoint}`;
+
+ // Add query parameters
+ if (parameters.query && Object.keys(parameters.query).length > 0) {
+ const queryParams = new URLSearchParams();
+ Object.entries(parameters.query).forEach(([key, value]) => {
+ if (value !== undefined && value !== '') {
+ queryParams.append(key, String(value));
+ }
+ });
+ if (queryParams.toString()) {
+ url += `?${queryParams.toString()}`;
+ }
+ }
+
+ // Prepare headers
+ const headers: Record
`;
+ })
+ // Headers
+ .replace(/^### (.+)$/gm, '${code.replace(//g, '>')}$1
')
+ .replace(/^## (.+)$/gm, '$1
')
+ .replace(/^# (.+)$/gm, '$1
')
+ // Inline code
+ .replace(/`([^`]+)`/g, '$1')
+ // Links
+ .replace(/\[([^\]]+)\]\(([^)]+)\)/g, '$1')
+ // Bold text
+ .replace(/\*\*([^*]+)\*\*/g, '$1')
+ // Lists
+ .replace(/^- (.+)$/gm, '${items}
`;
+ })
+ // Convert double line breaks to paragraph breaks
+ .replace(/\n\s*\n/g, '
') + // Wrap remaining content in paragraphs + .replace(/^(?![<])/gm, '
') + // Clean up paragraph wrapping around headers and other elements + .replace(/
(<[h123]|
|<\/pre>|<\/ul>)/g, '$1') + // Remove trailing paragraph tags + .replace(/<\/p>$/g, '') + // Remove empty paragraphs + .replace(/
<\/p>/g, ''); +} + +export async function GET(request: Request) { + try { + const { searchParams } = new URL(request.url); + const readmePath = searchParams.get('path'); + const format = searchParams.get('format'); // Check if HTML format is requested + + if (!readmePath) { + return NextResponse.json( + { error: 'README path is required' }, + { status: 400 } + ); + } + + // Security check: ensure the file is within the recipes directory or is the main README + const recipesPath = path.join(process.cwd(), 'recipes'); + const mainReadmePath = path.join(process.cwd(), 'README.md'); + const resolvedPath = path.resolve(readmePath); + const resolvedRecipesPath = path.resolve(recipesPath); + const resolvedMainReadmePath = path.resolve(mainReadmePath); + + if (!resolvedPath.startsWith(resolvedRecipesPath) && resolvedPath !== resolvedMainReadmePath) { + return NextResponse.json( + { error: 'Access denied: File must be within recipes directory or be the main README' }, + { status: 403 } + ); + } + + // Check if file exists + if (!fs.existsSync(resolvedPath)) { + return NextResponse.json( + { error: 'README file not found' }, + { status: 404 } + ); + } + + const content = fs.readFileSync(resolvedPath, 'utf-8'); + + // If accessed directly in browser (no explicit JSON format requested), return HTML + if (format !== 'json') { + const htmlContent = ` + +
+Recipe Instructions + + + + + ā Close +${convertMarkdownToHtml(content)}+ +`; + + return new NextResponse(htmlContent, { + headers: { + 'Content-Type': 'text/html; charset=utf-8', + }, + }); + } + + // Return JSON for API calls + return NextResponse.json({ + content, + success: true + }); + + } catch (error) { + console.error('Error reading README file:', error); + return NextResponse.json( + { error: 'Failed to read README file' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/recipe-portal/app/api/recipes/route.ts b/recipe-portal/app/api/recipes/route.ts new file mode 100644 index 00000000..0ca059dc --- /dev/null +++ b/recipe-portal/app/api/recipes/route.ts @@ -0,0 +1,21 @@ +import { NextResponse } from 'next/server'; +import { scanAllRecipes, getAuthRecipe } from '../../../lib/recipeScanner'; + +export async function GET() { + try { + const categories = scanAllRecipes(); + const authRecipe = getAuthRecipe(); + + return NextResponse.json({ + categories, + authRecipe, + timestamp: new Date().toISOString() + }); + } catch (error) { + console.error('Error in recipes API:', error); + return NextResponse.json( + { error: 'Failed to scan recipes' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/recipe-portal/app/api/resources/route.ts b/recipe-portal/app/api/resources/route.ts new file mode 100644 index 00000000..ac45f521 --- /dev/null +++ b/recipe-portal/app/api/resources/route.ts @@ -0,0 +1,281 @@ +import { NextResponse } from 'next/server'; +import axios from 'axios'; + +// Base resource fetching function +async function fetchWithAuth(endpoint: string, token: string) { + try { + const baseURL = process.env.SIGMA_BASE_URL || 'https://aws-api.sigmacomputing.com/v2'; + const url = `${baseURL}${endpoint}`; + console.log(`Fetching: ${url}`); + const response = await axios.get(url, { + headers: { + 'Authorization': `Bearer ${token}`, + 'Accept': 'application/json' + } + }); + console.log(`Response status for 
${endpoint}:`, response.status); + return response.data; + } catch (error) { + console.error(`Error fetching ${endpoint}:`, (error as any).response?.data || (error as any).message); + throw error; + } +} + +export async function GET(request: Request) { + try { + const { searchParams } = new URL(request.url); + const type = searchParams.get('type'); + const token = searchParams.get('token'); + + if (!token) { + return NextResponse.json( + { error: 'Authentication token is required' }, + { status: 401 } + ); + } + + if (!type) { + return NextResponse.json( + { error: 'Resource type is required. Use: teams, members, workbooks, connections, workspaces, bookmarks, templates, datasets, dataModels, accountTypes, workbookElements, materializationSchedules' }, + { status: 400 } + ); + } + + let data: any; + let transformedData: any[]; + + switch (type) { + case 'teams': + data = await fetchWithAuth('/teams', token); + transformedData = (data.entries || data).map((team: any) => ({ + id: team.teamId, + name: team.name, + description: team.description || '', + memberCount: team.memberCount || 0 + })); + break; + + case 'members': + data = await fetchWithAuth('/members', token); + // Filter out potentially inactive members and map to display format + const activeMembers = (data.entries || data).filter((member: any) => { + // Add filters for inactive members based on patterns you identify + // For now, keeping all members - you can modify this filter + return true; + }); + + transformedData = activeMembers.map((member: any) => ({ + id: member.memberId, + name: `${member.firstName} ${member.lastName}`.trim(), + email: member.email, + firstName: member.firstName, + lastName: member.lastName, + type: member.memberType + })); + break; + + case 'workbooks': + data = await fetchWithAuth('/workbooks', token); + transformedData = (data.entries || data).map((workbook: any) => ({ + id: workbook.workbookId, + name: workbook.name, + path: workbook.path, + ownerId: workbook.ownerId, + 
createdBy: workbook.createdBy, + url: workbook.url + })); + break; + + case 'connections': + data = await fetchWithAuth('/connections', token); + transformedData = (data.entries || data).map((connection: any) => ({ + id: connection.connectionId, + name: connection.name, + type: connection.type, + description: connection.description || '' + })); + break; + + case 'workspaces': + data = await fetchWithAuth('/workspaces', token); + transformedData = (data.entries || data).map((workspace: any) => ({ + id: workspace.workspaceId, + name: workspace.name, + description: workspace.description || '' + })); + break; + + case 'bookmarks': + // Using favorites endpoint since bookmarks API maps to favorites + data = await fetchWithAuth('/favorites', token); + transformedData = (data.entries || data).map((favorite: any) => ({ + id: favorite.favoriteId || favorite.inodeId, + name: favorite.name || favorite.title, + description: favorite.description || '', + type: favorite.type || 'favorite', + url: favorite.url + })); + break; + + case 'templates': + data = await fetchWithAuth('/templates', token); + transformedData = (data.entries || data).map((template: any) => ({ + id: template.templateId, + name: template.name, + description: template.description || '', + type: template.type + })); + break; + + case 'datasets': + data = await fetchWithAuth('/datasets', token); + transformedData = (data.entries || data).map((dataset: any) => ({ + id: dataset.datasetId, + name: dataset.name, + description: dataset.description || '', + type: dataset.type + })); + break; + + case 'dataModels': + data = await fetchWithAuth('/dataModels', token); + transformedData = (data.entries || data).map((dataModel: any) => ({ + id: dataModel.dataModelId, + name: dataModel.name, + description: dataModel.description || '', + type: dataModel.type || 'dataModel' + })); + break; + + case 'accountTypes': + data = await fetchWithAuth('/accountTypes', token); + console.log('AccountTypes raw data:', 
JSON.stringify(data, null, 2)); + transformedData = (data.entries || data).map((accountType: any) => ({ + id: accountType.accountTypeName, + name: accountType.accountTypeName, + description: accountType.description || '', + type: accountType.isCustom ? 'custom' : 'built-in', + isCustom: accountType.isCustom + })); + break; + + case 'workbookElements': + const workbookId = searchParams.get('workbookId'); + if (!workbookId) { + return NextResponse.json( + { error: 'workbookId parameter is required for workbookElements' }, + { status: 400 } + ); + } + + try { + // First, get all pages from the workbook + console.log(`Fetching pages for workbook: ${workbookId}`); + const pagesData = await fetchWithAuth(`/workbooks/${workbookId}/pages`, token); + console.log('Pages data:', JSON.stringify(pagesData, null, 2)); + + const pages = pagesData.entries || pagesData || []; + let allElements: any[] = []; + + // For each page, get its elements + for (const page of pages) { + const pageId = page.pageId || page.id; + if (pageId) { + try { + console.log(`Fetching elements for page: ${pageId}`); + const elementsData = await fetchWithAuth(`/workbooks/${workbookId}/pages/${pageId}/elements`, token); + console.log(`Elements data for page ${pageId}:`, JSON.stringify(elementsData, null, 2)); + + const pageElements = elementsData.entries || elementsData || []; + + // Add page information to each element + const elementsWithPageInfo = pageElements.map((element: any) => ({ + ...element, + pageId: pageId, + pageName: page.name || page.title || `Page ${pageId}` + })); + + allElements = allElements.concat(elementsWithPageInfo); + } catch (pageError) { + console.warn(`Failed to fetch elements for page ${pageId}:`, pageError); + // Continue with other pages even if one fails + } + } + } + + console.log('All extracted elements:', allElements); + + transformedData = allElements.map((element: any) => ({ + id: element.elementId || element.id || element.elementUid, + name: element.name || element.title 
|| element.displayName || `${element.pageName} - ${element.name || element.title || element.displayName || 'Unnamed Element'}`, + type: element.type || element.elementType || 'element', + description: element.description || `Element on page: ${element.pageName}`, + pageId: element.pageId, + pageName: element.pageName + })); + + } catch (error) { + console.error('Error fetching workbook elements:', error); + transformedData = []; + } + + console.log('Final transformed elements data:', transformedData); + break; + + case 'materializationSchedules': + const workbookIdForMat = searchParams.get('workbookId'); + if (!workbookIdForMat) { + return NextResponse.json( + { error: 'workbookId parameter is required for materializationSchedules' }, + { status: 400 } + ); + } + + try { + console.log(`Fetching materialization schedules for workbook: ${workbookIdForMat}`); + const schedulesData = await fetchWithAuth(`/workbooks/${workbookIdForMat}/materialization-schedules`, token); + console.log('Materialization schedules data:', JSON.stringify(schedulesData, null, 2)); + + const schedules = schedulesData.entries || schedulesData || []; + + transformedData = schedules.map((schedule: any) => ({ + id: schedule.sheetId, // Use sheetId as the value that will be sent to the script + name: schedule.elementName, // Display the element name to the user + description: `${schedule.schedule.cronSpec} ${schedule.schedule.timezone}${schedule.paused ? 
' - PAUSED' : ''}`, + type: 'materializationSchedule', + sheetId: schedule.sheetId, + elementName: schedule.elementName, + cronSpec: schedule.schedule.cronSpec, + timezone: schedule.schedule.timezone, + paused: schedule.paused + })); + + } catch (error) { + console.error('Error fetching materialization schedules:', error); + transformedData = []; + } + + console.log('Final transformed schedules data:', transformedData); + break; + + default: + return NextResponse.json( + { error: `Unsupported resource type: ${type}` }, + { status: 400 } + ); + } + + return NextResponse.json({ + type, + count: transformedData.length, + data: transformedData.sort((a: any, b: any) => (a.name || '').localeCompare(b.name || '')) + }); + + } catch (error) { + console.error('Error in resources API:', error); + return NextResponse.json( + { error: 'Failed to fetch resources' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/recipe-portal/app/api/token/clear/route.ts b/recipe-portal/app/api/token/clear/route.ts new file mode 100644 index 00000000..4c0f4b01 --- /dev/null +++ b/recipe-portal/app/api/token/clear/route.ts @@ -0,0 +1,65 @@ +import { NextResponse } from 'next/server'; +import fs from 'fs'; +import path from 'path'; +import os from 'os'; + +// Configuration-specific token caching +function getTokenCacheFile(clientId) { + // Create a safe filename using first 8 chars of clientId + const configHash = clientId ? 
clientId.substring(0, 8) : 'default'; + return path.join(os.tmpdir(), `sigma-portal-token-${configHash}.json`); +} + +export async function POST(request: Request) { + try { + const { clientId, clearAll } = await request.json(); + + console.log('Token clear request:', { clientId, clearAll }); + + if (clearAll) { + // Clear all token cache files + const tempDir = os.tmpdir(); + const files = fs.readdirSync(tempDir); + const tokenFiles = files.filter(file => file.startsWith('sigma-portal-token-') && file.endsWith('.json')); + + console.log('Clearing all tokens:', tokenFiles); + + let clearedCount = 0; + for (const file of tokenFiles) { + try { + fs.unlinkSync(path.join(tempDir, file)); + clearedCount++; + console.log(`Cleared token file: ${file}`); + } catch (err) { + console.warn(`Failed to delete token file ${file}:`, err); + } + } + + return NextResponse.json({ + success: true, + message: `Cleared ${clearedCount} authentication token(s)` + }); + } else { + // Clear specific configuration's token + const TOKEN_CACHE_FILE = getTokenCacheFile(clientId); + + if (fs.existsSync(TOKEN_CACHE_FILE)) { + fs.unlinkSync(TOKEN_CACHE_FILE); + } + + return NextResponse.json({ + success: true, + message: 'Authentication token cleared successfully' + }); + } + } catch (error) { + console.error('Error clearing token:', error); + return NextResponse.json( + { + success: false, + error: 'Failed to clear authentication token' + }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/recipe-portal/app/api/token/route.ts b/recipe-portal/app/api/token/route.ts new file mode 100644 index 00000000..01e78b35 --- /dev/null +++ b/recipe-portal/app/api/token/route.ts @@ -0,0 +1,87 @@ +import { NextResponse } from 'next/server'; +import fs from 'fs'; +import path from 'path'; +import os from 'os'; + +// Configuration-specific token caching +function getTokenCacheFile(clientId: string) { + // Create a safe filename using first 8 chars of clientId + const configHash = clientId ? 
clientId.substring(0, 8) : 'default'; + return path.join(os.tmpdir(), `sigma-portal-token-${configHash}.json`); +} + +export async function GET() { + try { + // Look for the most recent valid token across all configurations + const tempDir = os.tmpdir(); + const files = fs.readdirSync(tempDir); + const tokenFiles = files.filter(file => file.startsWith('sigma-portal-token-') && file.endsWith('.json')); + + let mostRecentToken = null; + let mostRecentTime = 0; + + console.log('Found token files:', tokenFiles); + + for (const file of tokenFiles) { + try { + const filePath = path.join(tempDir, file); + const tokenData = JSON.parse(fs.readFileSync(filePath, 'utf8')); + const now = Date.now(); + + // Check if token is still valid (not expired) + if (tokenData.expiresAt && now < tokenData.expiresAt) { + // Use the most recently created/accessed token + const lastAccessTime = tokenData.lastAccessed || tokenData.createdAt; + console.log(`Token ${file}: clientId=${tokenData.clientId?.substring(0,8)}, createdAt=${new Date(tokenData.createdAt)}, lastAccessed=${tokenData.lastAccessed ? 
new Date(tokenData.lastAccessed) : 'none'}, lastAccessTime=${lastAccessTime}`); + + if (lastAccessTime > mostRecentTime) { + console.log(` -> This is the most recent token so far`); + mostRecentTime = lastAccessTime; + mostRecentToken = { + hasValidToken: true, + token: tokenData.token, + expiresAt: tokenData.expiresAt, + timeRemaining: Math.round((tokenData.expiresAt - now) / 1000 / 60), // minutes + clientId: tokenData.clientId, + filePath: filePath // Keep track of which file this came from + }; + } + } else { + // Token expired, remove file + fs.unlinkSync(filePath); + } + } catch (err) { + // Skip invalid token files + console.warn(`Failed to read token file ${file}:`, err); + } + } + + if (mostRecentToken) { + console.log(`Selected token: clientId=${mostRecentToken.clientId?.substring(0,8)}`); + + // Update the last accessed time for this token + try { + const tokenData = JSON.parse(fs.readFileSync(mostRecentToken.filePath, 'utf8')); + tokenData.lastAccessed = Date.now(); + fs.writeFileSync(mostRecentToken.filePath, JSON.stringify(tokenData)); + } catch (err) { + console.warn('Failed to update token access time:', err); + } + + // Remove filePath from response + const { filePath, ...responseData } = mostRecentToken; + return NextResponse.json(responseData); + } + + return NextResponse.json({ + hasValidToken: false, + token: null + }); + } catch (error) { + console.error('Error checking token:', error); + return NextResponse.json({ + hasValidToken: false, + token: null + }); + } +} \ No newline at end of file diff --git a/recipe-portal/app/globals.css b/recipe-portal/app/globals.css new file mode 100644 index 00000000..bd6213e1 --- /dev/null +++ b/recipe-portal/app/globals.css @@ -0,0 +1,3 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; \ No newline at end of file diff --git a/recipe-portal/app/layout.tsx b/recipe-portal/app/layout.tsx new file mode 100644 index 00000000..6b24a7ca --- /dev/null +++ b/recipe-portal/app/layout.tsx @@ -0,0 +1,26 
@@ +import type { Metadata } from 'next' +import './globals.css' + +export const metadata: Metadata = { + title: 'QuickStarts API Toolkit', + description: 'Experiment with Sigma API calls and learn common request flows', + icons: { + icon: '/crane.png', + shortcut: '/crane.png', + apple: '/crane.png', + }, +} + +export default function RootLayout({ + children, +}: { + children: React.ReactNode +}) { + return ( + + + {children} + + + ) +} \ No newline at end of file diff --git a/recipe-portal/app/page.tsx b/recipe-portal/app/page.tsx new file mode 100644 index 00000000..101a566a --- /dev/null +++ b/recipe-portal/app/page.tsx @@ -0,0 +1,332 @@ +'use client'; + +import { useState, useEffect, useCallback } from 'react'; +import { RecipeCard } from '../components/RecipeCard'; +import { CodeViewer } from '../components/CodeViewer'; +import { QuickApiExplorer } from '../components/QuickApiExplorer'; + +interface Recipe { + id: string; + name: string; + description: string; + category: string; + filePath: string; + envVariables: string[]; + isAuthRequired: boolean; +} + +interface RecipeCategory { + name: string; + recipes: Recipe[]; +} + +interface RecipeData { + categories: RecipeCategory[]; + authRecipe: Recipe | null; + timestamp: string; +} + +export default function Home() { + const [recipeData, setRecipeData] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState (null); + const [activeTopTab, setActiveTopTab] = useState<'recipes' | 'quickapi'>('recipes'); + const [activeCategoryTab, setActiveCategoryTab] = useState (''); + const [authToken, setAuthToken] = useState (null); + const [hasValidToken, setHasValidToken] = useState(false); + const [showAuthModal, setShowAuthModal] = useState(false); + const [clearingToken, setClearingToken] = useState(false); + const [quickApiKey, setQuickApiKey] = useState(0); + + // Function to check auth status (reusable) + const checkAuthStatus = useCallback(async () => { + try { + const 
response = await fetch('/api/token'); + if (response.ok) { + const data = await response.json(); + if (data.hasValidToken) { + setHasValidToken(true); + setAuthToken(data.token); + } else { + setHasValidToken(false); + setAuthToken(null); + } + } else { + setHasValidToken(false); + setAuthToken(null); + } + } catch (error) { + setHasValidToken(false); + setAuthToken(null); + } + }, []); + + useEffect(() => { + async function fetchRecipes() { + try { + const response = await fetch('/api/recipes'); + if (!response.ok) { + throw new Error('Failed to fetch recipes'); + } + const data = await response.json(); + setRecipeData(data); + // Set first category as active by default + if (data.categories.length > 0) { + setActiveCategoryTab(data.categories[0].name); + } + } catch (err) { + setError(err instanceof Error ? err.message : 'Unknown error'); + } finally { + setLoading(false); + } + } + + fetchRecipes(); + checkAuthStatus(); + }, [checkAuthStatus]); + + // Periodically check auth status every 30 seconds + useEffect(() => { + const interval = setInterval(checkAuthStatus, 30000); + return () => clearInterval(interval); + }, [checkAuthStatus]); + + const clearToken = async () => { + setClearingToken(true); + try { + const response = await fetch('/api/token/clear', { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ clearAll: true }) + }); + + if (response.ok) { + setHasValidToken(false); + setAuthToken(null); + // If auth modal is open, close it to trigger form reset on next open + if (showAuthModal) { + setShowAuthModal(false); + } + } else { + console.error('Failed to clear token'); + } + } catch (error) { + console.error('Error clearing token:', error); + } finally { + setClearingToken(false); + } + }; + + if (loading) { + return ( + ++ ); + } + + if (error) { + return ( ++ ++Loading recipes...
+++ ); + } + + // Sort categories alphabetically + const sortedCategories = recipeData?.categories.sort((a, b) => a.name.localeCompare(b.name)) || []; + const activeCategory = sortedCategories.find(cat => cat.name === activeCategoryTab); + + return ( +++Error loading recipes: {error}
+ +++ ); +} diff --git a/recipe-portal/app/test/page.tsx b/recipe-portal/app/test/page.tsx new file mode 100644 index 00000000..5f2bebef --- /dev/null +++ b/recipe-portal/app/test/page.tsx @@ -0,0 +1,8 @@ +export default function TestPage() { + return ( ++ {/* Header */} +++ + + {/* Main Content Container */} +++++ + {/* Action Buttons */} +++ + ++
+ QuickStarts API Toolkit +
++ Experiment with Sigma API calls and learn common request flows +
++ + + {hasValidToken ? ( +++ + ++ ) : ( + + )} ++ {/* Top Level Tabs */} ++ + {/* Authentication Modal */} + {recipeData?.authRecipe && ( ++ ++ + {/* Tab Content */} + {activeTopTab === 'recipes' ? ( + <> + {/* Category Tabs */} ++ ++ + {/* Category Content */} ++ {activeCategory && ( ++ > + ) : ( ++ {activeCategory.recipes.map((recipe) => ( ++ )} ++ ))} + + )} + + {/* Footer */} + ++Ā© Sigma 2025
+Last updated: {recipeData ? new Date(recipeData.timestamp).toLocaleDateString() : 'ā'}
+setShowAuthModal(false)} + filePath={recipeData.authRecipe.filePath} + fileName="get-access-token.js" + envVariables={['CLIENT_ID', 'SECRET', 'authURL', 'baseURL']} + useEnvFile={false} + onTokenObtained={() => { + setHasValidToken(true); + // Refresh auth status to get the token + setTimeout(async () => { + try { + const response = await fetch('/api/token'); + if (response.ok) { + const data = await response.json(); + if (data.hasValidToken) { + setAuthToken(data.token); + } + } + } catch (error) { + // Ignore errors + } + }, 1000); + }} + onTokenCleared={() => { + setHasValidToken(false); + setAuthToken(null); + }} + defaultTab="readme" + hasValidToken={hasValidToken} + /> + )} + ++ ); +} \ No newline at end of file diff --git a/recipe-portal/components/AuthRecipeCard.tsx b/recipe-portal/components/AuthRecipeCard.tsx new file mode 100644 index 00000000..f4300095 --- /dev/null +++ b/recipe-portal/components/AuthRecipeCard.tsx @@ -0,0 +1,91 @@ +'use client'; + +import { useState } from 'react'; +import { Recipe } from '../lib/recipeScanner'; +import { CodeViewer } from './CodeViewer'; + +interface AuthRecipeCardProps { + recipe: Recipe; + useEnvFile?: boolean; + onTokenObtained?: () => void; +} + +export function AuthRecipeCard({ recipe, useEnvFile = false, onTokenObtained }: AuthRecipeCardProps) { + const [showCodeViewer, setShowCodeViewer] = useState(false); + return ( +Test Page
+If you can see this, routing is working.
+++ ); +} \ No newline at end of file diff --git a/recipe-portal/components/CodeViewer.tsx b/recipe-portal/components/CodeViewer.tsx new file mode 100644 index 00000000..cf889b90 --- /dev/null +++ b/recipe-portal/components/CodeViewer.tsx @@ -0,0 +1,1554 @@ +'use client'; + +import { useState, useEffect } from 'react'; +import { detectSmartParameters, SmartParameter, analyzeRecipeCode } from '../lib/smartParameters'; +import { SmartParameterForm } from './SmartParameterForm'; + +interface CodeViewerProps { + isOpen: boolean; + onClose: () => void; + filePath: string; + fileName: string; + envVariables?: string[]; + useEnvFile?: boolean; + onTokenObtained?: () => void; + onTokenCleared?: () => void; + defaultTab?: 'params' | 'run' | 'code' | 'readme'; + hasValidToken?: boolean; + readmePath?: string; +} + +interface ExecutionResult { + output: string; + error: string; + success: boolean | null; + timestamp: string; + httpStatus?: number; + httpStatusText?: string; + downloadInfo?: { + filename: string; + localPath: string; + size: number; + }; +} + +// Function to open the downloads folder via API +const openDownloadsFolder = async () => { + try { + await fetch('/api/open-folder', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ folder: 'downloaded-files' }) + }); + } catch (error) { + console.log('Could not open folder automatically. Please navigate to the downloaded-files folder manually.'); + } +}; + +export function CodeViewer({ isOpen, onClose, filePath, fileName, envVariables = [], useEnvFile = false, onTokenObtained, onTokenCleared, defaultTab = 'params', hasValidToken = false, readmePath }: CodeViewerProps) { + const [code, setCode] = useState++ +++ ++ š ++ ++ Authentication Setup +
+ + Required First + ++ Configure your API credentials and generate a bearer token for accessing Sigma’s REST API. Tokens are cached for reuse across recipes during your session. +
+ + {recipe.envVariables.length > 0 && ( +++ )} + +Required Environment Variables:
++ {recipe.envVariables.map((envVar) => ( + + {envVar} + + ))} +++ + + Start Here + + + Instructions ā + +++++ Token Duration: 1 hour (cached for session) ++ +setShowCodeViewer(false)} + filePath={recipe.filePath} + fileName="get-access-token.js" + envVariables={useEnvFile ? recipe.envVariables : ['CLIENT_ID', 'SECRET', 'authURL', 'baseURL']} + useEnvFile={useEnvFile} + onTokenObtained={onTokenObtained} + /> + (''); + const [loading, setLoading] = useState(false); + const [error, setError] = useState (null); + const [activeTab, setActiveTab] = useState<'params' | 'run' | 'code' | 'readme'>(defaultTab); + const [envValues, setEnvValues] = useState >({}); + const [envFileValues, setEnvFileValues] = useState >({}); + const [executing, setExecuting] = useState(false); + const [executionResult, setExecutionResult] = useState (null); + const [smartParameters, setSmartParameters] = useState ([]); + const [authToken, setAuthToken] = useState (null); + const [clearingToken, setClearingToken] = useState(false); + const [storeKeysLocally, setStoreKeysLocally] = useState(false); + const [hasStoredKeys, setHasStoredKeys] = useState(false); + const [currentFormIsStored, setCurrentFormIsStored] = useState(false); + + // Reset form when modal is closed + useEffect(() => { + if (!isOpen) { + setEnvValues({}); + setExecutionResult(null); + setActiveTab(defaultTab); + setError(null); + setSetAsDefault(false); + setCopyButtonText('Copy Output'); + } + }, [isOpen, defaultTab]); + const [saveNotification, setSaveNotification] = useState (null); + const [credentialSetName, setCredentialSetName] = useState(''); + const [availableCredentialSets, setAvailableCredentialSets] = useState ([]); + const [selectedCredentialSet, setSelectedCredentialSet] = useState(''); + const [setAsDefault, setSetAsDefault] = useState(false); + const [defaultCredentialSet, setDefaultCredentialSet] = useState (null); + const [copyButtonText, setCopyButtonText] = useState('Copy Output'); + const [customReadme, 
setCustomReadme] = useState (null); + const [readmeLoading, setReadmeLoading] = useState(false); + + useEffect(() => { + if (isOpen && filePath) { + // Smart default tab selection based on whether script has parameters + let smartDefaultTab: 'params' | 'run' | 'code' | 'readme'; + if (fileName === 'get-access-token.js') { + // Auth script: README first + smartDefaultTab = 'readme'; + } else if (smartParameters.length > 0) { + // Has parameters: Request first + smartDefaultTab = 'params'; + } else { + // No parameters: Run Script (Response) first + smartDefaultTab = 'run'; + } + + // Only set the tab if it's not already set to avoid switching during execution + // Don't switch tabs if we're currently executing or if we have results to show + if (!executing && !executionResult && (activeTab === defaultTab || (activeTab === 'run' && smartParameters.length > 0))) { + setActiveTab(smartDefaultTab); + } + fetchCode(); + checkAuthToken(); + if (useEnvFile) { + fetchEnvFile(); + } + } else if (!isOpen) { + // Reset form when modal is closed + if (fileName === 'get-access-token.js') { + setEnvValues({ + 'baseURL': 'https://aws-api.sigmacomputing.com/v2', + 'authURL': 'https://aws-api.sigmacomputing.com/v2/auth/token', + 'CLIENT_ID': '', + 'SECRET': '' + }); + } + setExecutionResult(null); + } + }, [isOpen, filePath, useEnvFile, fileName, executing, smartParameters.length, executionResult]); + + // Set default auth values for authentication script + useEffect(() => { + if (fileName === 'get-access-token.js' && !envValues['baseURL']) { + // Set defaults for auth script + handleEnvChange('baseURL', 'https://aws-api.sigmacomputing.com/v2'); + handleEnvChange('authURL', 'https://aws-api.sigmacomputing.com/v2/auth/token'); + } + }, [fileName, envValues]); + + // Sync internal auth state with parent + useEffect(() => { + if (!hasValidToken) { + setAuthToken(null); + + // Clear form fields when session is ended from main page + if (fileName === 'get-access-token.js') { + 
setEnvValues({ + 'baseURL': 'https://aws-api.sigmacomputing.com/v2', + 'authURL': 'https://aws-api.sigmacomputing.com/v2/auth/token', + 'CLIENT_ID': '', + 'SECRET': '' + }); + } + } + }, [hasValidToken, fileName]); + + // Load custom README if available + useEffect(() => { + if (readmePath && isOpen) { + setReadmeLoading(true); + fetch(`/api/readme?path=${encodeURIComponent(readmePath)}&format=json`) + .then(response => response.json()) + .then(data => { + if (data.success) { + setCustomReadme(data.content); + } + }) + .catch(error => { + console.error('Failed to load custom README:', error); + }) + .finally(() => { + setReadmeLoading(false); + }); + } else { + setCustomReadme(null); + } + }, [readmePath, isOpen]); + + // Detect smart parameters when code changes + useEffect(() => { + if (code) { + // Analyze code to find parameters + const analysis = analyzeRecipeCode(code, { filePath }); + const detected = detectSmartParameters(analysis.suggestedParameters, { filePath }); + setSmartParameters(detected); + } + }, [code, filePath]); + + // Check for stored credentials when auth modal opens + // Only auto-populate if form is empty (app startup scenario) + useEffect(() => { + const checkStoredCredentials = async () => { + if (isOpen && fileName === 'get-access-token.js') { + try { + const response = await fetch('/api/keys?retrieve=true'); + if (response.ok) { + const data = await response.json(); + setHasStoredKeys(data.hasStoredKeys); + setAvailableCredentialSets(data.credentialSets || []); + setDefaultCredentialSet(data.defaultSet || null); + + // Only auto-populate if fields are empty AND we have a valid token + // This prevents re-population after "End Session" is clicked + const hasEmptyFields = !envValues['CLIENT_ID'] && !envValues['SECRET']; + + if (data.hasStoredKeys && data.credentials && hasEmptyFields && hasValidToken) { + // Auto-populate form with complete config on startup + handleEnvChange('CLIENT_ID', data.credentials.clientId); + 
handleEnvChange('SECRET', data.credentials.clientSecret); + handleEnvChange('baseURL', data.credentials.baseURL); + handleEnvChange('authURL', data.credentials.authURL); + setStoreKeysLocally(true); // Check the checkbox since keys are stored + setSelectedCredentialSet(data.defaultSet || ''); + setCurrentFormIsStored(true); // Mark current form as representing stored data + } + } + } catch (error) { + console.log('Error checking stored credentials:', error); + } + } + }; + + checkStoredCredentials(); + }, [isOpen, fileName]); + + const checkAuthToken = async () => { + try { + console.log('checkAuthToken: Fetching current token from /api/token'); + const response = await fetch('/api/token'); + if (response.ok) { + const data = await response.json(); + console.log('checkAuthToken: Response from /api/token:', { hasValidToken: data.hasValidToken, clientId: data.clientId?.substring(0,8) }); + if (data.hasValidToken && data.token) { + console.log('checkAuthToken: Updating authToken state'); + setAuthToken(data.token); + } + } + } catch (error) { + console.log('No cached token available'); + } + }; + + const clearToken = async () => { + setClearingToken(true); + try { + // Clear the session token + const response = await fetch('/api/token/clear', { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ clearAll: true }) + }); + + if (response.ok) { + setAuthToken(null); + + // Handle stored keys logic for auth script + if (fileName === 'get-access-token.js') { + if (!storeKeysLocally && hasStoredKeys) { + // User unchecked the box - clear stored keys + await fetch('/api/keys', { method: 'DELETE' }); + setHasStoredKeys(false); + } + + // Always clear form fields on End Session + // This implements the new UX flow: + // - Session-only: fields cleared + // - Storage enabled: fields cleared (will be restored on next startup) + setEnvValues({ + 'baseURL': 'https://aws-api.sigmacomputing.com/v2', + 'authURL': 
'https://aws-api.sigmacomputing.com/v2/auth/token', + 'CLIENT_ID': '', + 'SECRET': '' + }); + } + + if (onTokenCleared) { + onTokenCleared(); + } + } else { + console.error('Failed to clear token'); + } + } catch (error) { + console.error('Error clearing token:', error); + } finally { + setClearingToken(false); + } + }; + + + const fetchCode = async () => { + setLoading(true); + setError(null); + + try { + const response = await fetch(`/api/code?path=${encodeURIComponent(filePath)}`); + if (!response.ok) { + throw new Error('Failed to fetch code'); + } + const data = await response.json(); + setCode(data.content); + } catch (err) { + setError(err instanceof Error ? err.message : 'Unknown error'); + } finally { + setLoading(false); + } + }; + + const copyToClipboard = () => { + navigator.clipboard.writeText(code); + alert('Code copied to clipboard!'); + }; + + const copyFilePath = () => { + navigator.clipboard.writeText(filePath); + alert('File path copied to clipboard!'); + }; + + const fetchEnvFile = async () => { + try { + const response = await fetch('/api/env'); + if (response.ok) { + const data = await response.json(); + setEnvFileValues(data.values); + // Pre-fill envValues with file values when useEnvFile is true + if (data.values) { + setEnvValues(data.values); + } + } + } catch (err) { + console.error('Failed to fetch env file values:', err); + } + }; + + const getDownloadFilename = (fileName: string, envValues: Record ) => { + switch (fileName) { + case 'export-workbook-element-csv.js': + return envValues['EXPORT_FILENAME'] || 'export.csv'; + case 'export-workbook-pdf.js': + return 'workbook-export.pdf'; + default: + return 'download'; + } + }; + + const getDownloadContentType = (fileName: string) => { + switch (fileName) { + case 'export-workbook-element-csv.js': + return 'text/csv'; + case 'export-workbook-pdf.js': + return 'application/pdf'; + default: + return 'application/octet-stream'; + } + }; + + const createBlobFromContent = (content: string, 
contentType: string) => { + // All content from DOWNLOAD_RESULT protocol is base64 encoded + try { + const byteCharacters = atob(content); + const byteNumbers = new Array(byteCharacters.length); + for (let i = 0; i < byteCharacters.length; i++) { + byteNumbers[i] = byteCharacters.charCodeAt(i); + } + const byteArray = new Uint8Array(byteNumbers); + return new Blob([byteArray], { type: contentType }); + } catch (error) { + // Fallback for non-base64 content (shouldn't happen with new protocol) + console.warn('Failed to decode base64 content, treating as text:', error); + return new Blob([content], { type: contentType }); + } + }; + + const handleStreamingDownload = async (filePath: string, envVariables: Record , filename: string, contentType: string) => { + try { + const response = await fetch('/api/download-stream', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + filePath, + envVariables, + filename, + contentType + }) + }); + + if (!response.ok) { + throw new Error('Failed to start download stream'); + } + + const reader = response.body?.getReader(); + if (!reader) { + throw new Error('No response body'); + } + + let outputMessages: string[] = []; + let jsonBuffer = ''; // Persistent buffer for handling split JSON messages + + // Initialize with starting message + const startingMessage = `${new Date().toLocaleTimeString()} - Starting export process...`; + outputMessages.push(startingMessage); + + setExecutionResult({ + output: startingMessage + '\n', + error: '', + success: null, // null indicates "in progress" + timestamp: new Date().toISOString() + }); + + const decoder = new TextDecoder(); + + while (true) { + const { done, value } = await reader.read(); + + if (done) break; + + const chunk = decoder.decode(value); + const lines = chunk.split('\n'); + + for (const line of lines) { + if (line.startsWith('data: ') && line.trim() !== 'data: ') { + const jsonPart = line.substring(6); + jsonBuffer += jsonPart; + 
+ // Try to parse the accumulated JSON + try { + const data = JSON.parse(jsonBuffer); + // Success! Reset buffer and process the data + jsonBuffer = ''; + const timestamp = new Date(data.timestamp).toLocaleTimeString(); + + // Add message to beginning of array (newest first) + // Show debug messages during development + const prefix = ''; + const newMessage = `${timestamp} - ${prefix}${data.message}`; + outputMessages.unshift(newMessage); + + // Keep only last 100 messages to see debug info + if (outputMessages.length > 100) { + outputMessages = outputMessages.slice(0, 100); + } + + // Update the execution result with progressive output (newest first) + setExecutionResult({ + output: outputMessages.join('\n') + '\n', + error: '', + success: null, // Keep as "in progress" until completion + timestamp: data.timestamp + }); + + // Handle download completion with folder link + if (data.type === 'success' && data.data && data.data.filename) { + // Create clickable message to open downloads folder + const folderMessage = `${timestamp} - š File saved! 
Click here to open downloads folder`; + const fileInfo = `${timestamp} - ā ${data.data.filename} (${Math.round(data.data.size / 1024)}KB) saved to downloaded-files/`; + outputMessages.unshift(folderMessage); + outputMessages.unshift(fileInfo); + + setExecutionResult({ + output: outputMessages.join('\n') + '\n', + error: '', + success: true, + timestamp: data.timestamp, + downloadInfo: { + filename: data.data.filename, + localPath: data.data.localPath, + size: data.data.size + } + }); + + // Switch to Response tab to show the completion message + setActiveTab('run'); + } + + // Handle errors + if (data.type === 'error') { + setExecutionResult({ + output: outputMessages.join('\n') + '\n', + error: data.message, + success: false, + timestamp: data.timestamp + }); + break; + } + + } catch (e) { + // JSON parsing failed - this might be a partial message + // Keep the buffer and wait for more data, but limit buffer size to prevent memory issues + if (jsonBuffer.length > 500000) { // 500KB limit + console.error('JSON buffer too large, discarding:', jsonBuffer.substring(0, 100) + '...'); + jsonBuffer = ''; + } + // Don't log every parse error as they're expected for partial messages + } + } else if (line.trim() === '' && jsonBuffer) { + // Empty line might indicate end of an SSE message - try to parse what we have + try { + const data = JSON.parse(jsonBuffer); + jsonBuffer = ''; // Reset on successful parse + + const timestamp = new Date(data.timestamp).toLocaleTimeString(); + const newMessage = `${timestamp} - ${data.message}`; + outputMessages.unshift(newMessage); + + if (outputMessages.length > 100) { + outputMessages = outputMessages.slice(0, 100); + } + + setExecutionResult({ + output: outputMessages.join('\n') + '\n', + error: '', + success: null, + timestamp: data.timestamp + }); + + // Handle download completion (same logic as above) + if (data.type === 'success' && data.data && data.data.filename) { + const folderMessage = `${timestamp} - š File saved! 
Click here to open downloads folder`; + const fileInfo = `${timestamp} - ā ${data.data.filename} (${Math.round(data.data.size / 1024)}KB) saved to downloaded-files/`; + outputMessages.unshift(folderMessage); + outputMessages.unshift(fileInfo); + + setExecutionResult({ + output: outputMessages.join('\n') + '\n', + error: '', + success: true, + timestamp: data.timestamp, + downloadInfo: { + filename: data.data.filename, + localPath: data.data.localPath, + size: data.data.size + } + }); + + setActiveTab('run'); + } + + if (data.type === 'error') { + setExecutionResult({ + output: outputMessages.join('\n') + '\n', + error: data.message, + success: false, + timestamp: data.timestamp + }); + return; // Exit the stream processing + } + + } catch (e) { + // Still couldn't parse - keep waiting for more data + } + } + } + } + + } catch (error) { + setExecutionResult({ + output: '', + error: error instanceof Error ? error.message : 'Unknown streaming error', + success: false, + timestamp: new Date().toISOString() + }); + } + }; + + const executeScript = async () => { + console.log('executeScript called'); + setExecuting(true); + setExecutionResult(null); + + try { + let currentEnvValues = envValues; + + // If using env file, refresh the values before execution + if (useEnvFile) { + const response = await fetch('/api/env'); + if (response.ok) { + const data = await response.json(); + setEnvFileValues(data.values); + // Use the fresh values directly instead of waiting for state update + currentEnvValues = data.values; + setEnvValues(data.values); + } + } + + // Add core auth variables (will be filled from centralized auth, direct input, or env file) + const coreAuthVars = { + 'CLIENT_ID': useEnvFile ? (currentEnvValues['CLIENT_ID'] || envFileValues['CLIENT_ID'] || '') : (currentEnvValues['CLIENT_ID'] || ''), + 'SECRET': useEnvFile ? (currentEnvValues['SECRET'] || envFileValues['SECRET'] || '') : (currentEnvValues['SECRET'] || ''), + 'authURL': useEnvFile ? 
(envFileValues['authURL'] || 'https://aws-api.sigmacomputing.com/v2/auth/token') : 'https://aws-api.sigmacomputing.com/v2/auth/token', + 'baseURL': useEnvFile ? (envFileValues['baseURL'] || 'https://aws-api.sigmacomputing.com/v2') : 'https://aws-api.sigmacomputing.com/v2' + }; + + // Validate that required auth credentials are provided (for auth script only) + console.log('Validating auth credentials:', { fileName, coreAuthVars }); + if (fileName === 'get-access-token.js' && (!coreAuthVars.CLIENT_ID || !coreAuthVars.SECRET)) { + console.log('Validation failed - missing credentials'); + setExecutionResult({ + output: '', + error: 'Authentication required: Please provide CLIENT_ID and SECRET credentials in the Config tab.', + success: false, + timestamp: new Date().toISOString(), + httpStatus: 401, + httpStatusText: 'Unauthorized' + }); + setExecuting(false); + return; + } + + console.log('Validation passed, continuing execution...'); + + const allEnvVariables = { ...coreAuthVars, ...currentEnvValues }; + console.log('About to make API request with variables:', Object.keys(allEnvVariables)); + + + + // Check if this is a download recipe + const isDownloadRecipe = ['export-workbook-element-csv.js', 'export-workbook-pdf.js'].includes(fileName); + + let result; + let response; + + if (isDownloadRecipe) { + // Handle download recipes with streaming progress + await handleStreamingDownload(filePath, allEnvVariables, getDownloadFilename(fileName, currentEnvValues), getDownloadContentType(fileName)); + return; // Exit early since streaming handles everything + } else { + // Handle regular recipes + response = await fetch('/api/execute', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + filePath, + envVariables: allEnvVariables + }) + }); + + console.log('API response received:', response.status, response.statusText); + + result = await response.json(); + console.log('API result:', result); + setExecutionResult(result); + 
console.log('ExecutionResult set, switching to run tab'); + setActiveTab('run'); + } + + // If this is an auth script and execution was successful, notify parent and refresh token + if (result.success && fileName === 'get-access-token.js' && onTokenObtained) { + onTokenObtained(); + + // Switch to Response tab to show authentication result + setActiveTab('run'); + + // Store complete config (credentials + server settings) if user checked the box + if (storeKeysLocally && allEnvVariables['CLIENT_ID'] && allEnvVariables['SECRET']) { + try { + const setName = credentialSetName.trim(); + if (!setName) { + console.warn('Cannot save credentials without a name during authentication'); + // Continue with authentication but don't save + setTimeout(() => checkAuthToken(), 1000); + return; + } + await fetch('/api/keys', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + clientId: allEnvVariables['CLIENT_ID'], + clientSecret: allEnvVariables['SECRET'], + baseURL: allEnvVariables['baseURL'], + authURL: allEnvVariables['authURL'], + name: setName, + setAsDefault: setAsDefault + }) + }); + setHasStoredKeys(true); + setCurrentFormIsStored(true); // Mark current form as stored + + // Show success notification for auto-save during authentication + showSaveNotification(`Config "${setName}" saved during authentication!`); + + // Update available sets + const updatedResponse = await fetch('/api/keys?list=true'); + if (updatedResponse.ok) { + const updatedData = await updatedResponse.json(); + setAvailableCredentialSets(updatedData.credentialSets || []); + setDefaultCredentialSet(updatedData.defaultSet || null); + } + } catch (error) { + console.error('Failed to store credentials:', error); + } + } + + // Refresh the auth token for smart parameter dropdowns + setTimeout(() => checkAuthToken(), 1000); + } + + if (!response.ok) { + throw new Error(result.error || 'Execution failed'); + } + } catch (err) { + setExecutionResult({ + output: '', 
+ error: err instanceof Error ? err.message : 'Unknown error', + success: false, + timestamp: new Date().toISOString() + }); + } finally { + setExecuting(false); + } + }; + + const loadCredentialSet = async (setName: string) => { + try { + const response = await fetch(`/api/keys?retrieve=true&set=${encodeURIComponent(setName)}`); + if (response.ok) { + const data = await response.json(); + if (data.credentials) { + // Load complete config: credentials + server settings + handleEnvChange('CLIENT_ID', data.credentials.clientId); + handleEnvChange('SECRET', data.credentials.clientSecret); + handleEnvChange('baseURL', data.credentials.baseURL); + handleEnvChange('authURL', data.credentials.authURL); + setCredentialSetName(setName); + setCurrentFormIsStored(true); // Mark current form as representing stored data + } + } + } catch (error) { + console.error('Failed to load credential set:', error); + } + }; + + const handleEnvChange = (key: string, value: string) => { + setEnvValues(prev => ({ + ...prev, + [key]: value + })); + + // Mark form as unsaved when credentials or server settings change + if (['CLIENT_ID', 'SECRET', 'baseURL', 'authURL'].includes(key)) { + setCurrentFormIsStored(false); + } + }; + + const showSaveNotification = (message: string) => { + setSaveNotification(message); + setTimeout(() => setSaveNotification(null), 3000); // Auto-hide after 3 seconds + }; + + const deleteConfig = async (configName: string) => { + try { + await fetch(`/api/keys?config=${encodeURIComponent(configName)}`, { + method: 'DELETE' + }); + + // Clear form if we deleted the currently selected config + if (selectedCredentialSet === configName) { + setSelectedCredentialSet(''); + setCredentialSetName(''); + handleEnvChange('CLIENT_ID', ''); + handleEnvChange('SECRET', ''); + handleEnvChange('baseURL', 'https://aws-api.sigmacomputing.com/v2'); + handleEnvChange('authURL', 'https://aws-api.sigmacomputing.com/v2/auth/token'); + setCurrentFormIsStored(false); + } + + // Update 
available sets + const updatedResponse = await fetch('/api/keys?list=true'); + if (updatedResponse.ok) { + const updatedData = await updatedResponse.json(); + setAvailableCredentialSets(updatedData.credentialSets || []); + setDefaultCredentialSet(updatedData.defaultSet || null); + setHasStoredKeys(updatedData.credentialSets?.length > 0); + } + + showSaveNotification(`Config "${configName}" deleted successfully!`); + } catch (error) { + console.error('Failed to delete config:', error); + showSaveNotification('Failed to delete config. Please try again.'); + } + }; + + if (!isOpen) return null; + + return ( + ++ {/* Header */} ++ ); +} \ No newline at end of file diff --git a/recipe-portal/components/QuickApiExplorer.tsx b/recipe-portal/components/QuickApiExplorer.tsx new file mode 100644 index 00000000..ea38aed1 --- /dev/null +++ b/recipe-portal/components/QuickApiExplorer.tsx @@ -0,0 +1,287 @@ +'use client'; + +import { useState, useEffect } from 'react'; +import { detectSmartParameters, SmartParameter } from '../lib/smartParameters'; +import { QuickApiModal } from './QuickApiModal'; + +interface QuickApiEndpoint { + id: string; + name: string; + method: 'GET'; + path: string; + description: string; + category: 'List All' | 'Get Details'; + parameters: SmartParameter[]; + example?: string; +} + +interface QuickApiExplorerProps { + hasValidToken: boolean; + authToken?: string | null; +} + +const QUICK_ENDPOINTS: QuickApiEndpoint[] = [ + // Zero parameter endpoints + { + id: 'list-accounttypes', + name: 'Account Types', + method: 'GET', + path: '/accountTypes', + description: 'Get a list of all account types', + category: 'List All', + parameters: [], + example: 'View different account type configurations' + }, + { + id: 'list-connections', + name: 'Connections', + method: 'GET', + path: '/connections', + description: 'Get a list of all data connections', + category: 'List All', + parameters: [], + example: 'See all configured data sources' + }, + { + id: 
'list-datamodels', + name: 'Data Models', + method: 'GET', + path: '/dataModels', + description: 'Get a list of all data models', + category: 'List All', + parameters: [], + example: 'See all available data models' + }, + { + id: 'list-members', + name: 'Members', + method: 'GET', + path: '/members', + description: 'Get a list of all members in your organization', + category: 'List All', + parameters: [], + example: 'See all users and their details' + }, + { + id: 'list-teams', + name: 'Teams', + method: 'GET', + path: '/teams', + description: 'Get a list of all teams in your organization', + category: 'List All', + parameters: [], + example: 'Perfect for seeing all available teams' + }, + { + id: 'list-templates', + name: 'Templates', + method: 'GET', + path: '/templates', + description: 'Get a list of available templates', + category: 'List All', + parameters: [], + example: 'Browse reusable templates' + }, + { + id: 'list-workbooks', + name: 'Workbooks', + method: 'GET', + path: '/workbooks', + description: 'Get a list of all workbooks you have access to', + category: 'List All', + parameters: [], + example: 'Browse all available workbooks' + }, + { + id: 'list-workspaces', + name: 'Workspaces', + method: 'GET', + path: '/workspaces', + description: 'Get a list of all workspaces', + category: 'List All', + parameters: [], + example: 'View organizational structure' + }, + // Single parameter endpoints + { + id: 'get-datamodel', + name: 'Data Model Details', + method: 'GET', + path: '/dataModels/{dataModelId}', + description: 'Get detailed information about a specific data model', + category: 'Get Details', + parameters: detectSmartParameters(['dataModelId']), + example: 'Get data model structure and metadata' + }, + { + id: 'get-member', + name: 'Member Details', + method: 'GET', + path: '/members/{memberId}', + description: 'Get detailed information about a specific member', + category: 'Get Details', + parameters: detectSmartParameters(['memberId']), + example: 
'Get user profile and permissions' + }, + { + id: 'get-team', + name: 'Team Details', + method: 'GET', + path: '/teams/{teamId}', + description: 'Get detailed information about a specific team', + category: 'Get Details', + parameters: detectSmartParameters(['teamId']), + example: 'Get team members and permissions' + }, + { + id: 'get-workbook', + name: 'Workbook Details', + method: 'GET', + path: '/workbooks/{workbookId}', + description: 'Get detailed information about a specific workbook', + category: 'Get Details', + parameters: detectSmartParameters(['workbookId']), + example: 'Get workbook metadata and structure' + }, + { + id: 'get-workbook-pages', + name: 'Workbook Pages', + method: 'GET', + path: '/workbooks/{workbookId}/pages', + description: 'Get all pages in a workbook with their metadata', + category: 'Get Details', + parameters: detectSmartParameters(['workbookId']), + example: 'See all pages and their structure in the workbook' + } +]; + +export function QuickApiExplorer({ hasValidToken, authToken }: QuickApiExplorerProps) { + const [activeCategory, setActiveCategory] = useState<'List All' | 'Get Details'>('List All'); + const [selectedEndpoint, setSelectedEndpoint] = useState++ + {/* Tabs */} +++ +{fileName}
++ {fileName === 'get-access-token.js' ? ( + // Auth script tab order: README ā Config + <> + + + + + > + ) : ( + // Regular recipe tab order: Config ā Response ā README ā View Recipe (if params exist) + // Or: Response ā README ā View Recipe (if no params) + <> + {smartParameters.length > 0 && ( + + )} + + + + > + )} ++ + {/* Content */} ++ {activeTab === 'readme' ? ( +++ {fileName === 'get-access-token.js' ? ( ++ +++ ) : ( ++++ š ++ +Authentication Setup
+ + Required First + ++ Configure your API credentials and generate a bearer token for accessing Sigma’s REST API. + Tokens are cached for reuse across recipes during your session. +
+ +++ +Required Environment Variables:
++ CLIENT_ID + SECRET + authURL + baseURL +++ + + Start Here + + + Instructions ā + ++ ++ Token Duration: 1 hour (cached for session) +++ {readmeLoading ? ( ++ ) : activeTab === 'params' ? ( ++ ++ ) : customReadme ? ( +Loading README...
+++ )} +{ + let html = customReadme; + + // Handle headers + html = html.replace(/^# (.+)$/gm, '+ ) : ( +$1
'); + html = html.replace(/^## (.+)$/gm, '$1
'); + html = html.replace(/^### (.+)$/gm, '$1
'); + + // Handle inline code + html = html.replace(/`([^`]+)`/g, '$1'); + + // Handle bold text + html = html.replace(/\*\*([^*]+)\*\*/g, '$1'); + + // Handle links + html = html.replace(/\[([^\]]+)\]\(([^)]+)\)/g, '$1'); + + // Process lists line by line + const lines = html.split('\n'); + const processed = []; + let inBulletList = false; + let inNumberList = false; + + for (let i = 0; i < lines.length; i++) { + const line = lines[i]; + const trimmed = line.trim(); + + if (trimmed.startsWith('- ')) { + if (!inBulletList) { + processed.push(''); + inBulletList = true; + } + if (inNumberList) { + processed.push(''); + inNumberList = false; + } + processed.push(`
'); + inBulletList = false; + } + processed.push(`- ${trimmed.substring(2)}
`); + } else if (/^\d+\. /.test(trimmed)) { + if (!inNumberList) { + processed.push(''); + inNumberList = true; + } + if (inBulletList) { + processed.push('
${trimmed.replace(/^\d+\. /, '')} `); + } else { + if (inBulletList) { + processed.push(''); + inBulletList = false; + } + if (inNumberList) { + processed.push(''); + inNumberList = false; + } + if (trimmed === '') { + // Only add break if we're not between sections + const nextLine = lines[i + 1]?.trim(); + if (nextLine && !nextLine.startsWith('#')) { + processed.push(''); + } + } else if (trimmed.startsWith('#')) { + // Headers are already processed, just add the line + processed.push(line); + } else { + // Regular text - just add it with minimal spacing + processed.push(`${line}`); + } + } + } + + // Close any open lists + if (inBulletList) processed.push(''); + if (inNumberList) processed.push(''); + + return processed.join('\n'); + })() + }} + /> +++ )} +Recipe Information
++ This recipe demonstrates how to use the Sigma API for specific use cases. + Refer to the code and run the script to see the results. +
++ {fileName === 'get-access-token.js' ? ( ++ ) : activeTab === 'run' ? ( ++ {/* Header with Setup Guide in top-right corner */} ++ ) : ( +++ + {/* Load Existing Config - FIRST thing user does */} + {availableCredentialSets.length > 0 && ( +++ + š Setup Guide + +š Authentication Request
++ Configure your Sigma API credentials to access the platform +
++ Once authenticated, use the "End Session" button in the header to clear your authentication +
+++ )} + + {/* Server Endpoint - Manual Configuration */} +ā” Quick Start - Load Saved Config
++++ + ++ {selectedCredentialSet && ( + + )} + + ++ {availableCredentialSets.length} saved config{availableCredentialSets.length !== 1 ? 's' : ''} available +
++ + ++ + {/* API Credentials and Storage - Combined intelligently */} +++ + {/* Authentication Status */} + {(authToken || hasValidToken) && ( +š API Credentials
+ +++ + {/* Storage Options - Integrated into credentials section */} ++ + handleEnvChange('CLIENT_ID', e.target.value)} + placeholder="Enter Client ID" + className="w-full px-2 py-1 border border-blue-300 rounded text-sm font-mono focus:border-blue-500 focus:ring-1 focus:ring-blue-500 bg-white" + /> +++ + handleEnvChange('SECRET', e.target.value)} + placeholder="Enter Client Secret" + className="w-full px-2 py-1 border border-blue-300 rounded text-sm font-mono focus:border-blue-500 focus:ring-1 focus:ring-blue-500 bg-white" + /> ++++++ + {storeKeysLocally && ( ++ setStoreKeysLocally(e.target.checked)} + className="h-4 w-4 text-blue-600 focus:ring-blue-500 border-gray-300 rounded" + /> + ++ {currentFormIsStored && ( + + ā Stored + + )} ++ {/* Save notification */} + {saveNotification && ( ++ )} ++ {saveNotification} ++ )} + ++++ + { + setCredentialSetName(e.target.value); + setCurrentFormIsStored(false); // Mark as unsaved when name changes + // Reset default checkbox when changing config name + setSetAsDefault(false); + }} + placeholder="e.g., Production, Staging" + className="w-full px-2 py-1 border border-blue-300 rounded text-sm focus:border-blue-500 focus:ring-1 focus:ring-blue-500 bg-white" + /> ++ ++ setSetAsDefault(e.target.checked)} + className="h-4 w-4 text-blue-600 focus:ring-blue-500 border-blue-300 rounded" + /> + + {setAsDefault && ( + ā + )} ++ + +++ )} + + {useEnvFile && ( ++ ā + Currently Authenticated ++++ )} ++ š Environment file mode is enabled. Values above will be ignored in favor of the .env file. +
+{ + console.log('SmartParameterForm authToken:', authToken); + // Switch to Response tab immediately so user can see progress + setActiveTab('run'); + if (!executing) { + executeScript(); + } + }} + executing={executing} + onShowReadme={() => setActiveTab('readme')} + /> + )} + + {/* Copy Code and Run Script Buttons */} ++ ) : ( + // Code tab + loading ? ( ++ + ++ + {/* Parameter Summary */} + {Object.keys(envValues).length > 0 && Object.values(envValues).some(v => v && v.trim()) && ( +++ )} + + {/* Execution Results */} + {executionResult && ( +Request Parameters
++ {smartParameters.map(param => { + const value = envValues[param.name]; + if (!value || !value.trim()) return null; + + return ( +++ {param.friendlyName}: {value} ++ ); + })} ++ {/* Header with Status and Response Code */} ++ )} +++ + {/* Response Body */} ++++ + {executionResult.success === true ? 'ā ' : + executionResult.success === false ? 'ā' : + 'ā³'} + + + {executionResult.success === true + ? `Success${executionResult.httpStatus ? ` (${executionResult.httpStatus})` : ''}` + : executionResult.success === false + ? `Error${executionResult.httpStatus ? ` (${executionResult.httpStatus})` : ''}` + : 'Processing...' + } + ++ + {new Date(executionResult.timestamp).toLocaleTimeString()} + ++ {executionResult.output && ( ++++ )} + {executionResult.error && ( +++Console Output:
+ ++ {executionResult.output.split('\n').map((line, index) => ( +++ {line.includes('š File saved! Click here to open downloads folder') ? ( + + {line.split('š File saved! Click here to open downloads folder')[0]} + + + ) : ( + line + )} ++ ))} +++ )} +++Error Details:
+ ++ {executionResult.error} +++ + Loading code... ++ ) : error ? ( +++ ) : ( +Error loading code: {error}
+ +++ ) + )} ++ ++++{code}+(null); + const [showModal, setShowModal] = useState(false); + + const categories = ['List All', 'Get Details'] as const; + const filteredEndpoints = QUICK_ENDPOINTS.filter(endpoint => endpoint.category === activeCategory); + + const handleEndpointClick = (endpoint: QuickApiEndpoint) => { + setSelectedEndpoint(endpoint); + setShowModal(true); + }; + + const handleCloseModal = () => { + setShowModal(false); + setSelectedEndpoint(null); + }; + + // Clear results when component mounts/unmounts + useEffect(() => { + return () => { + setSelectedEndpoint(null); + }; + }, []); + + return ( + ++ ); +} \ No newline at end of file diff --git a/recipe-portal/components/QuickApiModal.tsx b/recipe-portal/components/QuickApiModal.tsx new file mode 100644 index 00000000..ffb18d16 --- /dev/null +++ b/recipe-portal/components/QuickApiModal.tsx @@ -0,0 +1,259 @@ +'use client'; + +import { useState } from 'react'; +import { SmartParameterForm } from './SmartParameterForm'; +import { SmartParameter } from '../lib/smartParameters'; + +interface QuickApiEndpoint { + id: string; + name: string; + method: 'GET'; + path: string; + description: string; + category: 'List All' | 'Get Details'; + parameters: SmartParameter[]; + example?: string; +} + +interface QuickApiModalProps { + isOpen: boolean; + onClose: () => void; + endpoint: QuickApiEndpoint; + hasValidToken: boolean; + authToken?: string | null; +} + +interface ExecutionResult { + output: string; + error: string; + success: boolean; + timestamp: string; + httpStatus?: number; + requestUrl?: string; + requestMethod?: string; +} + +export function QuickApiModal({ isOpen, onClose, endpoint, hasValidToken, authToken }: QuickApiModalProps) { + const [paramValues, setParamValues] = useState+ {/* Header */} ++++ + {!hasValidToken && ( +Common GET Methods
++ Quickly test common Sigma API endpoints with minimal setup. Perfect for exploring your data and getting familiar with the API. +
+++ )} + + {/* Category Tabs */} ++ š ++++Authentication Required
+Please authenticate first to test these API endpoints.
+++ + {/* Endpoint Grid */} ++ {categories.map((category) => ( + + ))} +++ {filteredEndpoints.map((endpoint) => ( ++ + {/* Modal */} + {selectedEndpoint && ( +handleEndpointClick(endpoint)} + > + {/* Header */} ++ ))} +++ + {/* Description */} +++ ++ + {endpoint.method} + ++ {endpoint.parameters.length > 0 && ( + + {endpoint.parameters.length} param{endpoint.parameters.length !== 1 ? 's' : ''} + + )} ++ {endpoint.name} +
++ {endpoint.description} +
+ + {/* API Path */} +++API Endpoint:
++ {endpoint.method} {endpoint.path} +++ )} + >({}); + const [executing, setExecuting] = useState(false); + const [executionResult, setExecutionResult] = useState (null); + + const executeEndpoint = async () => { + setExecuting(true); + setExecutionResult(null); + + try { + // Build URL with path parameters + let url = endpoint.path; + endpoint.parameters.forEach(param => { + const value = paramValues[param.name]; + if (value) { + url = url.replace(`{${param.name}}`, value); + } + }); + + const response = await fetch('/api/call', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + endpoint: url, + method: endpoint.method, + parameters: { + path: {}, + query: {}, + header: {} + } + }) + }); + + const result = await response.json(); + setExecutionResult({ + ...result, + requestUrl: url, + requestMethod: endpoint.method + }); + + } catch (err) { + setExecutionResult({ + output: '', + error: err instanceof Error ? err.message : 'Unknown error', + success: false, + timestamp: new Date().toISOString() + }); + } finally { + setExecuting(false); + } + }; + + if (!isOpen) return null; + + return ( + ++ ); +} \ No newline at end of file diff --git a/recipe-portal/components/RecipeCard.tsx b/recipe-portal/components/RecipeCard.tsx new file mode 100644 index 00000000..940bff19 --- /dev/null +++ b/recipe-portal/components/RecipeCard.tsx @@ -0,0 +1,122 @@ +'use client'; + +import { useState } from 'react'; +import { Recipe } from '../lib/recipeScanner'; +import { CodeViewer } from './CodeViewer'; + +interface RecipeCardProps { + recipe: Recipe; + hasValidToken?: boolean; +} + +function getStatusBadge(recipe: Recipe, hasValidToken: boolean) { + const badges = []; + + // Check if this is a download recipe + const downloadRecipes = ['export-workbook-element-csv.js', 'export-workbook-pdf.js']; + const isDownloadRecipe = downloadRecipes.some(downloadFileName => + recipe.filePath.endsWith(downloadFileName) + ); + + if 
(isDownloadRecipe) { + badges.push({ + text: 'ā¬ļø Download', + className: 'bg-blue-100 text-blue-800' + }); + } + + // Only show "Ready to Run" if recipe has no variables AND (doesn't need auth OR has valid token) + if (recipe.envVariables.length === 0 && (!recipe.isAuthRequired || hasValidToken)) { + badges.push({ + text: 'Ready to Run', + className: 'bg-green-100 text-green-800' + }); + } + + return badges.length > 0 ? badges : null; +} + +function getCategoryIcon(category: string) { + const icons: Record+ {/* Header */} ++++ + {/* Content */} +++ ++ + {endpoint.method} + ++{endpoint.name}
++ {endpoint.path} ++++{endpoint.description}
+ {endpoint.example && ( +{endpoint.example}
+ )} + + {!hasValidToken && ( +++ )} + + {/* Parameters */} + {endpoint.parameters.length > 0 && ( ++ š ++++Authentication Required
+Please authenticate first to test this API endpoint.
+++ )} + + {/* Execute Button */} ++ + + {!hasValidToken && ( ++ + {/* Parameter Summary */} + {Object.keys(paramValues).length > 0 && Object.values(paramValues).some(v => v && v.trim()) && ( ++ Authentication required to call API +
+ )} +++ )} + + {/* Results */} + {executionResult && ( +Request Parameters
++ {endpoint.parameters.map(param => { + const value = paramValues[param.name]; + if (!value || !value.trim()) return null; + + return ( +++ {param.friendlyName}: {value} ++ ); + })} +++ )} +++ ++++ + {executionResult.success ? 'ā ' : 'ā'} + ++ + {new Date(executionResult.timestamp).toLocaleTimeString()} + ++ + {executionResult.success + ? `Success${executionResult.httpStatus ? ` (${executionResult.httpStatus})` : ''}` + : `Error${executionResult.httpStatus ? ` (${executionResult.httpStatus})` : ''}` + } + + {executionResult.requestUrl && ( +++ {executionResult.requestMethod} {executionResult.requestUrl} ++ )} ++ {executionResult.output && ( ++++ )} + {executionResult.error && ( +++Response:
+ ++ {executionResult.output} ++++ )} +Error:
++ {executionResult.error} ++= { + 'connections': 'š', + 'members': 'š„', + 'teams': 'š«', + 'workbooks': 'š', + 'embedding': 'š§', + 'authentication': 'š' + }; + + return icons[category.toLowerCase()] || 'š'; +} + +export function RecipeCard({ recipe, hasValidToken = false }: RecipeCardProps) { + const [showCodeViewer, setShowCodeViewer] = useState(false); + const badges = getStatusBadge(recipe, hasValidToken); + const icon = getCategoryIcon(recipe.category); + + return ( + + {/* Header */} ++ ); +} \ No newline at end of file diff --git a/recipe-portal/components/SmartParameterForm.tsx b/recipe-portal/components/SmartParameterForm.tsx new file mode 100644 index 00000000..cb9b061e --- /dev/null +++ b/recipe-portal/components/SmartParameterForm.tsx @@ -0,0 +1,455 @@ +'use client'; + +import { useState, useEffect } from 'react'; +import { SmartParameter } from '../lib/smartParameters'; + +interface SmartParameterFormProps { + parameters: SmartParameter[]; + values: Record++ + {/* Description */} +++ ++ {icon} ++ {badges && badges.map((badge, index) => ( + + {badge.text} + + ))} ++ {recipe.name} +
++ {recipe.description} +
+ + {/* Environment Variables */} + {recipe.envVariables.length > 0 && ( +++ )} + + +Required Variables:
++ {recipe.envVariables.slice(0, 3).map((envVar) => ( + + {envVar} + + ))} + {recipe.envVariables.length > 3 && ( + + +{recipe.envVariables.length - 3} more + + )} ++setShowCodeViewer(false)} + filePath={recipe.filePath} + fileName={recipe.filePath.split('/').pop() || 'recipe.js'} + envVariables={recipe.envVariables} + useEnvFile={false} + defaultTab="run" + readmePath={recipe.readmePath} + /> + ; + onChange: (values: Record ) => void; + authToken?: string | null; + onRunScript?: () => void; + executing?: boolean; + context?: 'recipe' | 'api'; + onShowReadme?: () => void; +} + +interface ResourceData { + id: string; + name: string; + description?: string; + [key: string]: any; +} + +export function SmartParameterForm({ + parameters, + values, + onChange, + authToken, + onRunScript, + executing = false, + context = 'recipe', + onShowReadme +}: SmartParameterFormProps) { + const [resourceData, setResourceData] = useState >({}); + const [loadingResources, setLoadingResources] = useState >({}); + + // Fetch resource data for dropdown parameters + useEffect(() => { + console.log('SmartParameterForm useEffect triggered - authToken changed:', authToken?.substring(0,20) + '...'); + + if (!authToken) { + setResourceData({}); + return; + } + + // Create an abort controller for this effect run + const abortController = new AbortController(); + let isCancelled = false; + + const fetchResources = async () => { + // Clear existing resource data at start + setResourceData({}); + + const resourceTypes = new Set (); + parameters.forEach(param => { + if (param.resourceType) { + resourceTypes.add(param.resourceType); + } + }); + + for (const resourceType of Array.from(resourceTypes)) { + // Check if this effect was cancelled + if (abortController.signal.aborted || isCancelled) { + console.log(`Effect cancelled, aborting ${resourceType} request`); + return; + } + + // Check if this resource type has dependencies + const param = parameters.find(p => p.resourceType === resourceType); + 
const dependentValue = param?.dependsOn ? values[param.dependsOn] : null; + + // Create a cache key that includes dependencies + const cacheKey = param?.dependsOn ? `${resourceType}_${dependentValue}` : resourceType; + + // Skip loading if dependency is not met + if (param?.dependsOn && !dependentValue) { + continue; + } + + setLoadingResources(prev => ({ ...prev, [resourceType]: true })); + + try { + let url = `/api/resources?type=${resourceType}&token=${encodeURIComponent(authToken)}`; + if (param?.dependsOn && dependentValue) { + // Map parameter names to expected API parameter names + const paramMapping: Record = { + 'WORKBOOK_ID': 'workbookId', + 'MEMBER_ID': 'memberId', + 'TEAM_ID': 'teamId' + }; + const apiParamName = paramMapping[param.dependsOn] || param.dependsOn.toLowerCase(); + url += `&${apiParamName}=${encodeURIComponent(dependentValue)}`; + } + + console.log(`Fetching ${resourceType} with token ${authToken?.substring(0,20)}...`); + console.log(`Dependent value for ${param?.dependsOn}:`, dependentValue); + + const response = await fetch(url, { signal: abortController.signal }); + + // Final check before processing response + if (abortController.signal.aborted || isCancelled) { + console.log(`Effect cancelled after ${resourceType} response, discarding results`); + return; + } + + if (response.ok) { + const data = await response.json(); + console.log(`Received ${resourceType} data (${data.data?.length || 0} items) with token ${authToken?.substring(0,20)}...`); + + // Only update state if not cancelled + if (!abortController.signal.aborted && !isCancelled) { + setResourceData(prev => ({ ...prev, [cacheKey]: data.data || [] })); + } + } else { + console.warn(`Failed to fetch ${resourceType}:`, response.statusText); + const errorText = await response.text(); + console.warn(`Error response:`, errorText); + } + } catch (error) { + if (error.name === 'AbortError') { + console.log(`Fetch ${resourceType} aborted`); + } else { + console.warn(`Error fetching 
${resourceType}:`, error); + } + } finally { + if (!abortController.signal.aborted && !isCancelled) { + setLoadingResources(prev => ({ ...prev, [resourceType]: false })); + } + } + } + }; + + fetchResources(); + + // Cleanup function to cancel ongoing requests when token changes + return () => { + console.log('Cleaning up SmartParameterForm effect - cancelling ongoing requests'); + isCancelled = true; + abortController.abort(); + }; + }, [parameters, authToken, values]); + + const handleChange = (paramName: string, value: string) => { + const newValues = { ...values, [paramName]: value }; + + + // Clear dependent parameters when a parent parameter changes + parameters.forEach(param => { + if (param.dependsOn === paramName) { + newValues[param.name] = ''; + // Also clear cached resource data for dependent parameters + const dependentCacheKey = `${param.resourceType}_${value}`; + setResourceData(prev => { + const updated = { ...prev }; + Object.keys(updated).forEach(key => { + if (key.startsWith(`${param.resourceType}_`) && key !== dependentCacheKey) { + delete updated[key]; + } + }); + return updated; + }); + } + }); + + onChange(newValues); + }; + + const renderParameter = (param: SmartParameter) => { + const currentValue = values[param.name] || ''; + + // Handle date inputs + if (param.type === 'date') { + // Convert stored value to YYYY-MM-DD format if it's in MM/DD/YYYY format + const convertToISODate = (dateStr: string): string => { + if (!dateStr) return ''; + + // If it's already in YYYY-MM-DD format, return as-is + if (/^\d{4}-\d{2}-\d{2}$/.test(dateStr)) { + return dateStr; + } + + // If it's in MM/DD/YYYY format, convert it + if (/^\d{1,2}\/\d{1,2}\/\d{4}$/.test(dateStr)) { + const [month, day, year] = dateStr.split('/'); + return `${year}-${month.padStart(2, '0')}-${day.padStart(2, '0')}`; + } + + return dateStr; + }; + + // Convert from YYYY-MM-DD to display format and back + const displayValue = currentValue; + + return ( + + ++ ); + } + + // Handle 
resource-based dropdowns + if (param.resourceType && authToken) { + // Check if this parameter depends on another parameter + const dependentValue = param.dependsOn ? values[param.dependsOn] : null; + const cacheKey = param.dependsOn ? `${param.resourceType}_${dependentValue}` : param.resourceType; + + const resources = resourceData[cacheKey] || []; + const isLoading = loadingResources[param.resourceType]; + const isDisabled = param.dependsOn && !dependentValue; + + // Debug logging + if (param.resourceType === 'accountTypes') { + console.log('AccountTypes debug:', { + resources, + isLoading, + resourceDataKeys: Object.keys(resourceData), + fullResourceData: resourceData + }); + } + + return ( +{param.description} (Format: YYYY-MM-DD)
+ { + // HTML date input always provides YYYY-MM-DD format + handleChange(param.name, e.target.value); + }} + className="w-full px-3 py-2 border border-gray-300 rounded-md text-sm focus:border-blue-500 focus:ring-1 focus:ring-blue-500" + /> + {currentValue && ( ++ API format: {convertToISODate(currentValue)} +
+ )} ++ ++ ); + } + + // Handle predefined options + if (param.type === 'select' && param.options) { + return ( +{param.description}
+ + {isDisabled ? ( ++ Please select {param.dependsOn?.replace('_', ' ').toLowerCase()} first ++ ) : isLoading ? ( ++ + Loading {param.resourceType}... ++ ) : resources.length > 0 ? ( + + ) : param.options && param.options.length > 0 ? ( + + ) : ( ++ No {param.resourceType} available or authentication required ++ )} + + {currentValue && ( ++ Selected: {currentValue} +
+ )} ++ ++ ); + } + + // Handle boolean parameters + if (param.type === 'boolean') { + return ( +{param.description}
+ ++ ++ ); + } + + // Handle JSON parameters + if (param.type === 'json') { + return ( +{param.description}
+ ++ ++ ); + } + + // Handle regular input fields + const inputType = param.type === 'number' ? 'number' : + param.type === 'email' ? 'email' : 'text'; + + return ( +{param.description}
++ ++ ); + }; + + if (parameters.length === 0) { + return null; + } + + return ( +{param.description}
+ handleChange(param.name, e.target.value)} + placeholder={param.placeholder} + className="w-full px-3 py-2 border border-gray-300 rounded-md text-sm focus:border-blue-500 focus:ring-1 focus:ring-blue-500" + {...(param.validation && { + pattern: param.validation.pattern, + minLength: param.validation.minLength, + maxLength: param.validation.maxLength + })} + /> +++ ); +} \ No newline at end of file diff --git a/recipe-portal/lib/keyStorage.ts b/recipe-portal/lib/keyStorage.ts new file mode 100644 index 00000000..6b8f791a --- /dev/null +++ b/recipe-portal/lib/keyStorage.ts @@ -0,0 +1,271 @@ +import crypto from 'crypto'; +import fs from 'fs'; +import path from 'path'; +import os from 'os'; + +// File path for encrypted key storage - use system config directory +// This ensures keys are stored outside project directory and won't be committed to git +const getKeysDirectory = () => { + const platform = os.platform(); + let configDir; + + if (platform === 'win32') { + configDir = process.env.APPDATA || os.tmpdir(); + } else if (platform === 'darwin') { + configDir = path.join(os.homedir(), 'Library', 'Application Support'); + } else { + configDir = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), '.config'); + } + + const sigmaDir = path.join(configDir, '.sigma-portal'); + + // Ensure directory exists + if (!fs.existsSync(sigmaDir)) { + fs.mkdirSync(sigmaDir, { recursive: true }); + } + + return sigmaDir; +}; + +const KEYS_CACHE_FILE = path.join(getKeysDirectory(), 'encrypted-keys.json'); + +// Algorithm and key derivation +const ALGORITHM = 'aes-256-cbc'; +const KEY_LENGTH = 32; // 256 bits + +/** + * Generate a machine-specific encryption key + * Uses system information to create a consistent key per machine + */ +function getMachineKey(): Buffer { + const machineInfo = [ + os.hostname(), + os.platform(), + os.arch(), + os.type() + ].join('|'); + + // Create a deterministic key from machine info + return crypto.scryptSync(machineInfo, 'sigma-portal-salt', 
KEY_LENGTH); +} + +/** + * Encrypt API configuration (credentials + server settings) + */ +function encryptCredentials(clientId: string, clientSecret: string, baseURL?: string, authURL?: string): string { + const key = getMachineKey(); + const iv = crypto.randomBytes(16); + const cipher = crypto.createCipheriv(ALGORITHM, key, iv); + + const data = JSON.stringify({ + clientId, + clientSecret, + baseURL: baseURL || 'https://aws-api.sigmacomputing.com/v2', + authURL: authURL || 'https://aws-api.sigmacomputing.com/v2/auth/token' + }); + let encrypted = cipher.update(data, 'utf8', 'hex'); + encrypted += cipher.final('hex'); + + return JSON.stringify({ + iv: iv.toString('hex'), + encrypted + }); +} + +/** + * Decrypt API configuration (credentials + server settings) + */ +function decryptCredentials(encryptedData: string): { clientId: string; clientSecret: string; baseURL: string; authURL: string } | null { + try { + const key = getMachineKey(); + const { iv, encrypted } = JSON.parse(encryptedData); + + const decipher = crypto.createDecipheriv(ALGORITHM, key, Buffer.from(iv, 'hex')); + + let decrypted = decipher.update(encrypted, 'hex', 'utf8'); + decrypted += decipher.final('utf8'); + + const parsed = JSON.parse(decrypted); + + // Ensure backward compatibility with old format + return { + clientId: parsed.clientId, + clientSecret: parsed.clientSecret, + baseURL: parsed.baseURL || 'https://aws-api.sigmacomputing.com/v2', + authURL: parsed.authURL || 'https://aws-api.sigmacomputing.com/v2/auth/token' + }; + } catch (error) { + console.error('Failed to decrypt credentials:', error); + return null; + } +} + +/** + * Store API configuration (credentials + server settings) encrypted on disk + */ +export async function storeCredentials(clientId: string, clientSecret: string, name: string = 'default', baseURL?: string, authURL?: string): Promise++ + {!authToken && parameters.some(p => p.resourceType) && ( ++ š {context === 'api' ? 'API Parameters' : 'Recipe Parameters'} +
++ Configure the parameters for this {context === 'api' ? 'API call' : 'recipe'}. Parameters with dropdowns will load available options automatically. +
+ + {/* Show download restrictions link for export recipes */} + {parameters.some(p => p.name === 'LIMIT') && onShowReadme && ( ++ +
+ )} +++ )} + + {parameters.map(renderParameter)} + + {onRunScript && ( ++ š Some parameters require authentication to load dropdown options. Please authenticate first to see all available choices. +
++ ++ )} +{ + try { + // Read existing credential sets + let allCredentials: Record = {}; + if (fs.existsSync(KEYS_CACHE_FILE)) { + const existingData = fs.readFileSync(KEYS_CACHE_FILE, 'utf-8'); + allCredentials = JSON.parse(existingData); + } + + const encryptedData = encryptCredentials(clientId, clientSecret, baseURL, authURL); + + // Store this set with its name + allCredentials[name] = { + encrypted: encryptedData, + storedAt: Date.now(), + version: '2.0' // Updated version to include server settings + }; + + // Mark as default if it's the first one + if (!allCredentials._metadata) { + allCredentials._metadata = { defaultSet: name }; + } + + fs.writeFileSync(KEYS_CACHE_FILE, JSON.stringify(allCredentials, null, 2)); + return true; + } catch (error) { + console.error('Failed to store credentials:', error); + return false; + } +} + +/** + * Retrieve and decrypt stored API configuration (credentials + server settings) + */ +export async function getStoredCredentials(name?: string): Promise<{ clientId: string; clientSecret: string; baseURL: string; authURL: string } | null> { + try { + if (!fs.existsSync(KEYS_CACHE_FILE)) { + return null; + } + + const allCredentials = JSON.parse(fs.readFileSync(KEYS_CACHE_FILE, 'utf-8')); + + // Use provided name, or default, or first available + let targetName = name; + if (!targetName) { + targetName = allCredentials._metadata?.defaultSet || Object.keys(allCredentials).find(k => k !== '_metadata'); + } + + if (!targetName || !allCredentials[targetName]) { + return null; + } + + const decrypted = decryptCredentials(allCredentials[targetName].encrypted); + return decrypted; + } catch (error) { + console.error('Failed to retrieve stored credentials:', error); + return null; + } +} + +/** + * Check if credentials are stored locally + */ +export async function hasStoredCredentials(): Promise { + try { + return fs.existsSync(KEYS_CACHE_FILE); + } catch (error) { + return false; + } +} + +/** + * Get list of stored credential set 
names + */ +export async function getStoredCredentialNames(): Promise { + try { + if (!fs.existsSync(KEYS_CACHE_FILE)) { + return []; + } + + const allCredentials = JSON.parse(fs.readFileSync(KEYS_CACHE_FILE, 'utf-8')); + return Object.keys(allCredentials).filter(k => k !== '_metadata'); + } catch (error) { + console.error('Failed to get credential names:', error); + return []; + } +} + +/** + * Get the default credential set name + */ +export async function getDefaultCredentialSetName(): Promise { + try { + if (!fs.existsSync(KEYS_CACHE_FILE)) { + return null; + } + + const allCredentials = JSON.parse(fs.readFileSync(KEYS_CACHE_FILE, 'utf-8')); + return allCredentials._metadata?.defaultSet || null; + } catch (error) { + return null; + } +} + +/** + * Set the default credential set + */ +export async function setDefaultCredentialSet(name: string): Promise { + try { + if (!fs.existsSync(KEYS_CACHE_FILE)) { + return false; + } + + const allCredentials = JSON.parse(fs.readFileSync(KEYS_CACHE_FILE, 'utf-8')); + if (!allCredentials[name]) { + return false; // Set doesn't exist + } + + if (!allCredentials._metadata) { + allCredentials._metadata = {}; + } + allCredentials._metadata.defaultSet = name; + + fs.writeFileSync(KEYS_CACHE_FILE, JSON.stringify(allCredentials, null, 2)); + return true; + } catch (error) { + console.error('Failed to set default credential set:', error); + return false; + } +} + +/** + * Clear stored credentials (optionally specific set) + */ +export async function clearStoredCredentials(name?: string): Promise { + try { + if (!fs.existsSync(KEYS_CACHE_FILE)) { + return true; + } + + if (!name) { + // Clear all + fs.unlinkSync(KEYS_CACHE_FILE); + return true; + } + + // Clear specific set + const allCredentials = JSON.parse(fs.readFileSync(KEYS_CACHE_FILE, 'utf-8')); + if (allCredentials[name]) { + delete allCredentials[name]; + + // Update default if we deleted it + if (allCredentials._metadata?.defaultSet === name) { + const remainingNames = 
Object.keys(allCredentials).filter(k => k !== '_metadata'); + allCredentials._metadata.defaultSet = remainingNames[0] || null; + } + + fs.writeFileSync(KEYS_CACHE_FILE, JSON.stringify(allCredentials, null, 2)); + } + + return true; + } catch (error) { + console.error('Failed to clear stored credentials:', error); + return false; + } +} \ No newline at end of file diff --git a/recipe-portal/lib/recipeScanner.ts b/recipe-portal/lib/recipeScanner.ts new file mode 100644 index 00000000..1a1a2e33 --- /dev/null +++ b/recipe-portal/lib/recipeScanner.ts @@ -0,0 +1,226 @@ +import fs from 'fs'; +import path from 'path'; + +export interface Recipe { + id: string; + name: string; + description: string; + category: string; + filePath: string; + envVariables: string[]; + isAuthRequired: boolean; + readmePath?: string; +} + +export interface RecipeCategory { + name: string; + recipes: Recipe[]; +} + +const RECIPES_PATH = path.join(process.cwd(), 'recipes'); + +/** + * Extract environment variables from a JavaScript file (excluding core auth variables) + */ +function extractEnvVariables(fileContent: string, fileName?: string): string[] { + const envRegex = /process\.env\.([A-Z_][A-Z0-9_]*)/g; + const matches = Array.from(fileContent.matchAll(envRegex)); + const envVars = matches.map(match => match[1]); + + // Core auth variables that are handled centrally + const coreAuthVars = new Set(['CLIENT_ID', 'SECRET', 'authURL', 'baseURL']); + + // Variables that are set programmatically by master scripts (don't expose in UI) + const masterScriptVars = new Set(['MEMBERID', 'WORKSPACEID']); + + // For master-script.js, be more aggressive about filtering + const isMasterScript = fileName?.includes('master-script'); + + // Filter out core auth variables and master script variables + let recipeSpecificVars = envVars.filter(envVar => + !coreAuthVars.has(envVar) && + !masterScriptVars.has(envVar) + ); + + // For master scripts, only include variables that are used in validation or logging + if 
(isMasterScript) { + const allowedMasterVars = new Set([ + 'EMAIL', 'NEW_MEMBER_FIRST_NAME', 'NEW_MEMBER_LAST_NAME', 'NEW_MEMBER_TYPE', + 'TEAMID', 'CONNECTIONID', 'WORKSPACE_NAME', 'PERMISSION' + ]); + recipeSpecificVars = recipeSpecificVars.filter(envVar => allowedMasterVars.has(envVar)); + } + + return Array.from(new Set(recipeSpecificVars)); // Remove duplicates +} + +/** + * Extract title and description from structured comments + */ +function extractDescription(fileContent: string): string { + const lines = fileContent.split('\n'); + let description = ''; + + for (const line of lines) { + const trimmed = line.trim(); + if (trimmed.startsWith('// Description:')) { + description = trimmed.substring(15).trim(); // Remove "// Description:" prefix + break; + } + } + + return description || 'No description available'; +} + +/** + * Extract title from structured comments + */ +function extractTitle(fileContent: string): string { + const lines = fileContent.split('\n'); + + for (const line of lines) { + const trimmed = line.trim(); + if (trimmed.startsWith('// Title:')) { + return trimmed.substring(9).trim(); // Remove "// Title:" prefix + } + } + + return ''; // Return empty string if no title found +} + +/** + * Generate a human-readable name from filename + */ +function generateRecipeName(filename: string): string { + return filename + .replace('.js', '') + .replace(/-/g, ' ') + .replace(/_/g, ' ') + .split(' ') + .map(word => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' '); +} + +/** + * Check if recipe requires authentication (references get-access-token) + */ +function requiresAuth(fileContent: string): boolean { + return fileContent.includes('get-access-token') || fileContent.includes('getBearerToken'); +} + +/** + * Scan a single recipe file and extract metadata + */ +function scanRecipeFile(filePath: string, category: string): Recipe | null { + try { + const fileContent = fs.readFileSync(filePath, 'utf-8'); + const filename = 
path.basename(filePath); + + // Skip if not a JavaScript file + if (!filename.endsWith('.js')) { + return null; + } + + // Check for corresponding README file + const baseFilename = filename.replace('.js', ''); + const readmePath = path.join(path.dirname(filePath), `${baseFilename}.md`); + const hasReadme = fs.existsSync(readmePath); + + const extractedTitle = extractTitle(fileContent); + const recipeName = extractedTitle || generateRecipeName(filename); + + const recipe: Recipe = { + id: `${category}-${filename.replace('.js', '')}`, + name: recipeName, + description: extractDescription(fileContent), + category: category, + filePath: filePath, + envVariables: extractEnvVariables(fileContent, filename), + isAuthRequired: requiresAuth(fileContent), + readmePath: hasReadme ? readmePath : undefined + }; + + return recipe; + } catch (error) { + console.error(`Error scanning recipe file ${filePath}:`, error); + return null; + } +} + +/** + * Scan all recipes in the sigma-api-recipes directory + */ +export function scanAllRecipes(): RecipeCategory[] { + const categories: RecipeCategory[] = []; + + try { + // Check if recipes directory exists + if (!fs.existsSync(RECIPES_PATH)) { + console.warn(`Recipes directory not found at: ${RECIPES_PATH}`); + return categories; + } + + const items = fs.readdirSync(RECIPES_PATH); + + for (const item of items) { + const itemPath = path.join(RECIPES_PATH, item); + const stat = fs.statSync(itemPath); + + // Skip files (like package.json, get-access-token.js at root level) + if (!stat.isDirectory()) { + continue; + } + + // Skip hidden directories and node_modules + if (item.startsWith('.') || item === 'node_modules') { + continue; + } + + // Scan recipes in this category + const recipes: Recipe[] = []; + const categoryFiles = fs.readdirSync(itemPath); + + for (const file of categoryFiles) { + const filePath = path.join(itemPath, file); + const fileStat = fs.statSync(filePath); + + if (fileStat.isFile()) { + const recipe = 
scanRecipeFile(filePath, item); + if (recipe) { + recipes.push(recipe); + } + } + } + + if (recipes.length > 0) { + categories.push({ + name: item.charAt(0).toUpperCase() + item.slice(1), + recipes: recipes.sort((a, b) => a.name.localeCompare(b.name)) + }); + } + } + + return categories.sort((a, b) => a.name.localeCompare(b.name)); + } catch (error) { + console.error('Error scanning recipes:', error); + return categories; + } +} + +/** + * Find the authentication recipe (get-access-token.js) + */ +export function getAuthRecipe(): Recipe | null { + try { + const authFilePath = path.join(RECIPES_PATH, 'get-access-token.js'); + + if (fs.existsSync(authFilePath)) { + return scanRecipeFile(authFilePath, 'authentication'); + } + + return null; + } catch (error) { + console.error('Error finding auth recipe:', error); + return null; + } +} \ No newline at end of file diff --git a/recipe-portal/lib/requestAnalyzer.ts b/recipe-portal/lib/requestAnalyzer.ts new file mode 100644 index 00000000..6fbc6411 --- /dev/null +++ b/recipe-portal/lib/requestAnalyzer.ts @@ -0,0 +1,224 @@ +interface ApiRequest { + method: 'GET' | 'POST' | 'PUT' | 'DELETE'; + url: string; + headers: Record ; + body?: any; +} + +interface RequestPreview { + description: string; + request: ApiRequest; + curlCommand: string; +} + +export function analyzeRecipeRequest( + code: string, + envVariables: Record , + baseURL = 'https://aws-api.sigmacomputing.com/v2' +): RequestPreview | null { + try { + // Extract method and endpoint patterns + const methodMatch = code.match(/axios\.(get|post|put|delete)\s*\(/i); + const urlMatch = code.match(/['"`]([^'"`]*\/[^'"`]*)['"`]/); + + if (!methodMatch || !urlMatch) { + return null; + } + + const method = methodMatch[1].toUpperCase() as 'GET' | 'POST' | 'PUT' | 'DELETE'; + let endpoint = urlMatch[1]; + + // Replace variables in the endpoint + Object.entries(envVariables).forEach(([key, value]) => { + if (value) { + endpoint = endpoint.replace(`\${${key}}`, value); + 
endpoint = endpoint.replace(`\${process.env.${key}}`, value); + endpoint = endpoint.replace(new RegExp(`\\$\\{baseURL\\}`, 'g'), baseURL); + } + }); + + // Construct full URL + const fullUrl = endpoint.startsWith('http') ? endpoint : `${baseURL}${endpoint}`; + + // Extract headers + const headers: Record = { + 'Authorization': `Bearer ${envVariables.CLIENT_ID ? '[YOUR_ACCESS_TOKEN]' : '[CACHED_TOKEN]'}`, + 'Accept': 'application/json' + }; + + // Check for Content-Type + if (method !== 'GET' && code.includes('Content-Type')) { + headers['Content-Type'] = 'application/json'; + } + + let body: any = undefined; + let description = ''; + + // Analyze request body for POST/PUT requests + if (method === 'POST' || method === 'PUT') { + // Look for request body patterns + const bodyPatterns = [ + /exportOptions\s*=\s*\{([\s\S]*?)\}/, + /\{[\s\S]*?runAsynchronously:\s*true[\s\S]*?\}/, + /const\s+\w+\s*=\s*\{([\s\S]*?)\}/ + ]; + + for (const pattern of bodyPatterns) { + const bodyMatch = code.match(pattern); + if (bodyMatch) { + try { + // Build request body based on recipe type and parameters + body = buildRequestBody(code, envVariables); + break; + } catch (e) { + // If parsing fails, show a basic structure + body = { + "// Note": "Request body structure varies by recipe", + "// See": "Recipe code for exact structure" + }; + } + } + } + } + + // Generate description + if (code.includes('export')) { + description = method === 'POST' ? 
+ 'Initiates an asynchronous export job' : + 'Downloads the export once ready'; + } else if (code.includes('workbooks')) { + description = 'Workbook-related API operation'; + } else if (code.includes('members')) { + description = 'Member management API operation'; + } else if (code.includes('teams')) { + description = 'Team management API operation'; + } else if (code.includes('connections')) { + description = 'Lists or manages API connections'; + } else { + description = `${method} request to Sigma API`; + } + + // Generate curl command + const curlCommand = generateCurlCommand({ method, url: fullUrl, headers, body }); + + return { + description, + request: { + method, + url: fullUrl, + headers, + body + }, + curlCommand + }; + + } catch (error) { + console.error('Error analyzing request:', error); + return null; + } +} + +function buildRequestBody(code: string, envVariables: Record ): any { + // Export workbook element (CSV/PDF) + if (code.includes('export') && code.includes('elementId')) { + const body: any = { + elementId: envVariables.ELEMENT_ID || 'ELEMENT_ID_HERE', + format: { type: 'csv' }, + runAsynchronously: true + }; + + if (code.includes('type: \'pdf\'')) { + body.format = { type: 'pdf', layout: 'portrait' }; + } + + // Add date range if present + if (envVariables.START_DATE && envVariables.END_DATE) { + body.parameters = { + DateFilter: `min:${envVariables.START_DATE},max:${envVariables.END_DATE}` + }; + } + + return body; + } + + // Export workbook (full workbook) + if (code.includes('export') && code.includes('workbookId')) { + return { + workbookId: envVariables.WORKBOOK_ID || 'WORKBOOK_ID_HERE', + format: { type: 'pdf', layout: 'portrait' } + }; + } + + // Create member + if (code.includes('members') && code.includes('POST')) { + return { + email: envVariables.EMAIL || 'user@example.com', + firstName: envVariables.NEW_MEMBER_FIRST_NAME || 'John', + lastName: envVariables.NEW_MEMBER_LAST_NAME || 'Doe', + accountType: envVariables.NEW_MEMBER_TYPE || 
'viewer' + }; + } + + // Permission assignments + if (code.includes('permission')) { + const permission = envVariables.PERMISSION || envVariables.WORKSPACE_PERMISSION || 'view'; + return { + memberId: envVariables.MEMBERID || 'MEMBER_ID_HERE', + permission: permission + }; + } + + // Create workspace + if (code.includes('workspace') && code.includes('POST')) { + return { + name: envVariables.WORKSPACE_NAME || 'My Workspace', + noDuplicates: envVariables.NO_DUPLICATES === 'true' + }; + } + + // Generic fallback + return { + "// Note": "Request body varies by recipe", + "// Parameters": "Populated from user inputs" + }; +} + +function generateCurlCommand(request: ApiRequest): string { + let curl = `curl -X ${request.method} "${request.url}"`; + + // Add headers + Object.entries(request.headers).forEach(([key, value]) => { + curl += ` \\\n -H "${key}: ${value}"`; + }); + + // Add body for POST/PUT + if (request.body && (request.method === 'POST' || request.method === 'PUT')) { + const bodyJson = JSON.stringify(request.body, null, 2); + curl += ` \\\n -d '${bodyJson}'`; + } + + return curl; +} + +export function getApiDocumentationUrl(endpoint: string): string | null { + const baseDocsUrl = 'https://help.sigmacomputing.com/reference/'; + + // Map common endpoints to documentation + const endpointMap: Record = { + '/connections': 'listconnections', + '/members': 'listmembers', + '/teams': 'listteams', + '/workbooks': 'listworkbooks', + '/workspaces': 'listworkspaces', + '/export': 'exportworkbook', + '/embed': 'createembedurl' + }; + + for (const [pattern, docPath] of Object.entries(endpointMap)) { + if (endpoint.includes(pattern)) { + return baseDocsUrl + docPath; + } + } + + return 'https://help.sigmacomputing.com/reference/'; +} \ No newline at end of file diff --git a/recipe-portal/lib/smartParameters.ts b/recipe-portal/lib/smartParameters.ts new file mode 100644 index 00000000..1b90ec50 --- /dev/null +++ b/recipe-portal/lib/smartParameters.ts @@ -0,0 +1,488 @@ 
+export interface SmartParameter { + name: string; + type: 'teamId' | 'memberId' | 'workbookId' | 'workspaceId' | 'connectionId' | 'elementId' | 'email' | 'string' | 'number' | 'select' | 'json' | 'boolean' | 'bookmarkId' | 'templateId' | 'datasetId' | 'dataModelId' | 'accountTypeId' | 'date'; + required: boolean; + friendlyName: string; + description: string; + resourceType?: 'teams' | 'members' | 'workbooks' | 'workspaces' | 'connections' | 'bookmarks' | 'templates' | 'datasets' | 'dataModels' | 'accountTypes' | 'workbookElements' | 'materializationSchedules'; + placeholder?: string; + options?: Array<{label: string; value: string}>; + dependsOn?: string; + validation?: { + pattern?: string; + minLength?: number; + maxLength?: number; + }; +} + +// Parameter detection patterns +const PARAMETER_PATTERNS: Record > = { + 'TEAMID': { + type: 'teamId', + resourceType: 'teams', + friendlyName: 'Team', + description: 'Select the team to work with' + }, + 'MEMBERID': { + type: 'memberId', + resourceType: 'members', + friendlyName: 'Member', + description: 'Select the member to work with' + }, + 'WORKBOOK_ID': { + type: 'workbookId', + resourceType: 'workbooks', + friendlyName: 'Workbook', + description: 'Select the workbook to work with' + }, + 'WORKSPACEID': { + type: 'workspaceId', + resourceType: 'workspaces', + friendlyName: 'Workspace', + description: 'Select the workspace to work with' + }, + 'CONNECTIONID': { + type: 'connectionId', + resourceType: 'connections', + friendlyName: 'Connection', + description: 'Select the connection to work with' + }, + 'BOOKMARKID': { + type: 'bookmarkId', + resourceType: 'bookmarks', + friendlyName: 'Favorite/Bookmark', + description: 'Select the favorite or bookmark to work with' + }, + 'BOOKMARK_ID': { + type: 'bookmarkId', + resourceType: 'bookmarks', + friendlyName: 'Favorite/Bookmark', + description: 'Select the favorite or bookmark to work with' + }, + 'FAVORITEID': { + type: 'bookmarkId', + resourceType: 'bookmarks', + 
friendlyName: 'Favorite/Bookmark', + description: 'Select the favorite or bookmark to work with' + }, + 'FAVORITE_ID': { + type: 'bookmarkId', + resourceType: 'bookmarks', + friendlyName: 'Favorite/Bookmark', + description: 'Select the favorite or bookmark to work with' + }, + 'TEMPLATEID': { + type: 'templateId', + resourceType: 'templates', + friendlyName: 'Template', + description: 'Select the template to work with' + }, + 'TEMPLATE_ID': { + type: 'templateId', + resourceType: 'templates', + friendlyName: 'Template', + description: 'Select the template to work with' + }, + 'DATASETID': { + type: 'datasetId', + resourceType: 'datasets', + friendlyName: 'Dataset', + description: 'Select the dataset to work with' + }, + 'DATASET_ID': { + type: 'datasetId', + resourceType: 'datasets', + friendlyName: 'Dataset', + description: 'Select the dataset to work with' + }, + 'DATAMODELID': { + type: 'dataModelId', + resourceType: 'dataModels', + friendlyName: 'Data Model', + description: 'Select the data model to work with' + }, + 'DATAMODEL_ID': { + type: 'dataModelId', + resourceType: 'dataModels', + friendlyName: 'Data Model', + description: 'Select the data model to work with' + }, + 'DATA_MODEL_ID': { + type: 'dataModelId', + resourceType: 'dataModels', + friendlyName: 'Data Model', + description: 'Select the data model to work with' + }, + 'ACCOUNTTYPEID': { + type: 'accountTypeId', + resourceType: 'accountTypes', + friendlyName: 'Account Type', + description: 'Select the account type to work with' + }, + 'ACCOUNTTYPE_ID': { + type: 'accountTypeId', + resourceType: 'accountTypes', + friendlyName: 'Account Type', + description: 'Select the account type to work with' + }, + 'ACCOUNT_TYPE_ID': { + type: 'accountTypeId', + resourceType: 'accountTypes', + friendlyName: 'Account Type', + description: 'Select the account type to work with' + }, + 'ELEMENT_ID': { + type: 'select', + resourceType: 'workbookElements', + friendlyName: 'Workbook Element', + description: 'Select 
the workbook element to export', + placeholder: 'Select element...', + dependsOn: 'WORKBOOK_ID' + }, + 'SHEET_ID': { + type: 'select', + resourceType: 'materializationSchedules', + friendlyName: 'Schedule Name', + description: 'Select the materialization schedule to run', + placeholder: 'Select schedule...', + dependsOn: 'WORKBOOK_ID' + }, + 'EMAIL': { + type: 'email', + friendlyName: 'Email Address', + description: 'Enter a valid email address', + placeholder: 'user@example.com', + validation: { + pattern: '^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$' + } + }, + 'NEW_MEMBER_FIRST_NAME': { + type: 'string', + friendlyName: 'First Name', + description: 'Enter the member\'s first name', + placeholder: 'John' + }, + 'NEW_MEMBER_LAST_NAME': { + type: 'string', + friendlyName: 'Last Name', + description: 'Enter the member\'s last name', + placeholder: 'Smith' + }, + 'NEW_MEMBER_TYPE': { + type: 'select', + resourceType: 'accountTypes', + friendlyName: 'Member Type', + description: 'Select the member account type (fallback: admin, creator, viewer if API unavailable)', + placeholder: 'Select account type...', + options: [ + { label: 'Admin', value: 'admin' }, + { label: 'Creator', value: 'creator' }, + { label: 'Viewer', value: 'viewer' } + ] + }, + 'SYNC_PATH': { + type: 'json', + friendlyName: 'Sync Path', + description: 'JSON array representing the database path to sync (e.g., ["database", "schema"] for schema-level sync)', + placeholder: '["SAMPLE_DATABASE", "PUBLIC"]' + }, + 'USER_NAME_PATTERN': { + type: 'string', + friendlyName: 'Username Pattern', + description: 'Pattern for generating usernames', + placeholder: 'user_{index}' + }, + 'DRY_RUN': { + type: 'boolean', + friendlyName: 'Dry Run', + description: 'Preview mode - shows what would be deactivated without making actual changes', + placeholder: 'true or false' + }, + 'LIMIT': { + type: 'number', + friendlyName: 'Row Limit', + description: 'Maximum rows to export (Default: 100,000 | Max: 1,000,000) - Large downloads may 
timeout without batching', + placeholder: '100000', + validation: { + min: 1, + max: 1000000 + } + }, + 'PERMISSION': { + type: 'select', + friendlyName: 'Permission Level', + description: 'Select the permission level to grant', + placeholder: 'Select permission...', + options: [ + { label: 'View', value: 'view' }, + { label: 'Explore', value: 'explore' }, + { label: 'Contribute', value: 'contribute' }, + { label: 'Manage', value: 'manage' } + ] + }, + 'WORKSPACE_PERMISSION': { + type: 'select', + friendlyName: 'Workspace Permission Level', + description: 'Select the workspace permission level to grant', + placeholder: 'Select permission...', + options: [ + { label: 'View', value: 'view' }, + { label: 'Explore', value: 'explore' }, + { label: 'Organize', value: 'organize' }, + { label: 'Edit', value: 'edit' } + ] + }, + 'WORKSPACE_NAME': { + type: 'string', + friendlyName: 'Workspace Name', + description: 'Enter the name for the new workspace', + placeholder: 'My Workspace' + }, + 'NO_DUPLICATES': { + type: 'boolean', + friendlyName: 'Prevent Duplicates', + description: 'Prevent creating duplicate workspace names', + placeholder: 'true or false' + }, + 'NEW_WORKBOOK_NAME': { + type: 'string', + friendlyName: 'New Workbook Name', + description: 'Enter the name for the copied workbook', + placeholder: 'My Copied Workbook' + }, + 'NEW_WORKBOOK_DESCRIPTION': { + type: 'string', + friendlyName: 'New Workbook Description', + description: 'Enter a description for the copied workbook', + placeholder: 'Copy of the original workbook' + }, + 'START_DATE': { + type: 'date', + friendlyName: 'Start Date', + description: 'Select start date for export range', + placeholder: '2024-01-01' + }, + 'END_DATE': { + type: 'date', + friendlyName: 'End Date', + description: 'Select end date for export range', + placeholder: '2024-12-31' + }, + 'EXPORT_FILENAME': { + type: 'string', + friendlyName: 'Export Filename', + description: 'Enter filename for exported CSV file', + placeholder: 
'my-export.csv' + }, + 'MAX_PAGES': { + type: 'number', + friendlyName: 'Max Pages', + description: 'Maximum number of pages to fetch (0 = all pages)', + placeholder: '5' + }, +}; + +// Fuzzy matching for parameter names +function fuzzyMatch(paramName: string, context?: { filePath?: string }): Partial | null { + const upperParam = paramName.toUpperCase(); + + // Direct match + if (PARAMETER_PATTERNS[upperParam]) { + return PARAMETER_PATTERNS[upperParam]; + } + + // OpenAPI parameter pattern matching + if (upperParam === 'TEAMID' || upperParam === 'TEAM_ID' || upperParam.includes('TEAMID')) { + return PARAMETER_PATTERNS['TEAMID']; + } + if (upperParam === 'MEMBERID' || upperParam === 'MEMBER_ID' || upperParam.includes('MEMBERID')) { + // Don't return MEMBERID pattern for master-script.js since it's set programmatically + if (context?.filePath?.includes('master-script')) { + return null; + } + return PARAMETER_PATTERNS['MEMBERID']; + } + if (upperParam === 'WORKBOOKID' || upperParam === 'WORKBOOK_ID' || upperParam.includes('WORKBOOKID')) { + return PARAMETER_PATTERNS['WORKBOOK_ID']; + } + if (upperParam === 'WORKSPACEID' || upperParam === 'WORKSPACE_ID' || upperParam.includes('WORKSPACEID')) { + // Don't return WORKSPACEID pattern for master-script.js since it's set programmatically + if (context?.filePath?.includes('master-script')) { + return null; + } + return PARAMETER_PATTERNS['WORKSPACEID']; + } + if (upperParam === 'CONNECTIONID' || upperParam === 'CONNECTION_ID' || upperParam.includes('CONNECTIONID')) { + return PARAMETER_PATTERNS['CONNECTIONID']; + } + if (upperParam === 'BOOKMARKID' || upperParam === 'BOOKMARK_ID' || upperParam.includes('BOOKMARKID')) { + return PARAMETER_PATTERNS['BOOKMARKID']; + } + if (upperParam === 'FAVORITEID' || upperParam === 'FAVORITE_ID' || upperParam.includes('FAVORITEID')) { + return PARAMETER_PATTERNS['FAVORITEID']; + } + if (upperParam === 'TEMPLATEID' || upperParam === 'TEMPLATE_ID' || upperParam.includes('TEMPLATEID')) { + 
return PARAMETER_PATTERNS['TEMPLATEID']; + } + if (upperParam === 'DATASETID' || upperParam === 'DATASET_ID' || upperParam.includes('DATASETID')) { + return PARAMETER_PATTERNS['DATASETID']; + } + + // Fuzzy matching for broader patterns + if (upperParam.includes('TEAM')) { + return PARAMETER_PATTERNS['TEAMID']; + } + if (upperParam.includes('MEMBER')) { + return PARAMETER_PATTERNS['MEMBERID']; + } + if (upperParam.includes('WORKBOOK')) { + return PARAMETER_PATTERNS['WORKBOOK_ID']; + } + if (upperParam.includes('WORKSPACE')) { + return PARAMETER_PATTERNS['WORKSPACEID']; + } + if (upperParam.includes('CONNECTION')) { + return PARAMETER_PATTERNS['CONNECTIONID']; + } + if (upperParam.includes('BOOKMARK')) { + return PARAMETER_PATTERNS['BOOKMARKID']; + } + if (upperParam.includes('FAVORITE')) { + return PARAMETER_PATTERNS['FAVORITEID']; + } + if (upperParam.includes('TEMPLATE')) { + return PARAMETER_PATTERNS['TEMPLATEID']; + } + if (upperParam.includes('DATASET')) { + return PARAMETER_PATTERNS['DATASETID']; + } + if (upperParam.includes('EMAIL')) { + return PARAMETER_PATTERNS['EMAIL']; + } + if (upperParam.includes('ELEMENT')) { + return PARAMETER_PATTERNS['ELEMENT_ID']; + } + if (upperParam.includes('SHEET')) { + return PARAMETER_PATTERNS['SHEET_ID']; + } + if (upperParam.includes('SYNC') && upperParam.includes('PATH')) { + return PARAMETER_PATTERNS['SYNC_PATH']; + } + if (upperParam.includes('USER') && upperParam.includes('PATTERN')) { + return PARAMETER_PATTERNS['USER_NAME_PATTERN']; + } + if (upperParam.includes('DRY') && upperParam.includes('RUN')) { + return PARAMETER_PATTERNS['DRY_RUN']; + } + if (upperParam.includes('LIMIT')) { + return PARAMETER_PATTERNS['LIMIT']; + } + if (upperParam.includes('WORKSPACE') && upperParam.includes('PERMISSION')) { + return PARAMETER_PATTERNS['WORKSPACE_PERMISSION']; + } + if (upperParam.includes('PERMISSION')) { + return PARAMETER_PATTERNS['PERMISSION']; + } + if (upperParam.includes('WORKSPACE') && upperParam.includes('NAME')) { 
+ return PARAMETER_PATTERNS['WORKSPACE_NAME']; + } + if (upperParam.includes('NO') && upperParam.includes('DUPLICATE')) { + return PARAMETER_PATTERNS['NO_DUPLICATES']; + } + if (upperParam.includes('NEW') && upperParam.includes('WORKBOOK') && upperParam.includes('NAME')) { + return PARAMETER_PATTERNS['NEW_WORKBOOK_NAME']; + } + if (upperParam.includes('NEW') && upperParam.includes('WORKBOOK') && upperParam.includes('DESCRIPTION')) { + return PARAMETER_PATTERNS['NEW_WORKBOOK_DESCRIPTION']; + } + if (upperParam.includes('START') && upperParam.includes('DATE')) { + return PARAMETER_PATTERNS['START_DATE']; + } + if (upperParam.includes('END') && upperParam.includes('DATE')) { + return PARAMETER_PATTERNS['END_DATE']; + } + if (upperParam.includes('EXPORT') && upperParam.includes('FILENAME')) { + return PARAMETER_PATTERNS['EXPORT_FILENAME']; + } + if (upperParam.includes('MAX') && upperParam.includes('PAGE')) { + return PARAMETER_PATTERNS['MAX_PAGES']; + } + if (upperParam.includes('FIRST') && upperParam.includes('NAME')) { + return PARAMETER_PATTERNS['NEW_MEMBER_FIRST_NAME']; + } + if (upperParam.includes('LAST') && upperParam.includes('NAME')) { + return PARAMETER_PATTERNS['NEW_MEMBER_LAST_NAME']; + } + if (upperParam.includes('TYPE')) { + return PARAMETER_PATTERNS['NEW_MEMBER_TYPE']; + } + + return null; +} + +export function detectSmartParameters(envVariables: string[], context?: { filePath?: string }): SmartParameter[] { + // Filter out MEMBERID and WORKSPACEID for master-script.js since they're set programmatically + let filteredEnvVariables = envVariables; + if (context?.filePath?.includes('master-script')) { + filteredEnvVariables = envVariables.filter(param => + param !== 'MEMBERID' && param !== 'WORKSPACEID' + ); + } + + return filteredEnvVariables.map(paramName => { + let detected = fuzzyMatch(paramName, context); + + // Special handling for PERMISSION parameter based on context + if (paramName === 'PERMISSION' && context?.filePath?.includes('workspace')) { + 
detected = PARAMETER_PATTERNS['WORKSPACE_PERMISSION']; + } + + if (detected) { + return { + name: paramName, + required: true, + friendlyName: detected.friendlyName || paramName, + description: detected.description || `Enter ${paramName}`, + ...detected + } as SmartParameter; + } + + // Default fallback for unknown parameters + return { + name: paramName, + type: 'string' as const, + required: true, + friendlyName: paramName.replace(/_/g, ' ').toLowerCase().replace(/\b\w/g, l => l.toUpperCase()), + description: `Enter ${paramName}`, + placeholder: `Enter ${paramName.toLowerCase()}...` + }; + }); +} + +// Get parameter suggestions based on code analysis +export function analyzeRecipeCode(code: string, context?: { filePath?: string }): { suggestedParameters: string[] } { + const envVarPattern = /process\.env\.([A-Z_]+)/g; + const matches = code.match(envVarPattern) || []; + + const parameters = new Set (); + matches.forEach(match => { + const paramName = match.replace('process.env.', ''); + // Filter out auth parameters since they're handled centrally + if (!['CLIENT_ID', 'SECRET', 'authURL', 'baseURL'].includes(paramName)) { + parameters.add(paramName); + } + }); + + let filteredParameters = Array.from(parameters); + + // Filter out MEMBERID and WORKSPACEID for master-script.js since they're set programmatically + if (context?.filePath?.includes('master-script')) { + filteredParameters = filteredParameters.filter(param => + param !== 'MEMBERID' && param !== 'WORKSPACEID' + ); + } + + return { + suggestedParameters: filteredParameters + }; +} \ No newline at end of file diff --git a/recipe-portal/next.config.js b/recipe-portal/next.config.js new file mode 100644 index 00000000..0db62528 --- /dev/null +++ b/recipe-portal/next.config.js @@ -0,0 +1,4 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = {} + +module.exports = nextConfig \ No newline at end of file diff --git a/recipe-portal/package.json b/recipe-portal/package.json new file mode 100644 index 
00000000..5402a740 --- /dev/null +++ b/recipe-portal/package.json @@ -0,0 +1,28 @@ +{ + "name": "recipe-portal", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint" + }, + "dependencies": { + "autoprefixer": "^10.4.21", + "dotenv": "^17.2.1", + "next": "15.4.6", + "react": "^18.2.0", + "react-dom": "^18.2.0" + }, + "devDependencies": { + "@types/node": "^20.0.0", + "@types/react": "^18.0.0", + "@types/react-dom": "^18.0.0", + "eslint": "^8.0.0", + "eslint-config-next": "15.4.6", + "postcss": "^8.0.0", + "tailwindcss": "^3.0.0", + "typescript": "^5.0.0" + } +} diff --git a/recipe-portal/postcss.config.js b/recipe-portal/postcss.config.js new file mode 100644 index 00000000..96bb01e7 --- /dev/null +++ b/recipe-portal/postcss.config.js @@ -0,0 +1,6 @@ +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} \ No newline at end of file diff --git a/recipe-portal/public/crane.png b/recipe-portal/public/crane.png new file mode 100644 index 00000000..ea925235 Binary files /dev/null and b/recipe-portal/public/crane.png differ diff --git a/recipe-portal/recipes/.member-emails b/recipe-portal/recipes/.member-emails new file mode 100644 index 00000000..dd5bc17f --- /dev/null +++ b/recipe-portal/recipes/.member-emails @@ -0,0 +1,3 @@ +user1@company.com, +user2@company.com, +user3@company.com \ No newline at end of file diff --git a/recipe-portal/recipes/connections/list_all_connections.js b/recipe-portal/recipes/connections/list_all_connections.js new file mode 100644 index 00000000..8dd59088 --- /dev/null +++ b/recipe-portal/recipes/connections/list_all_connections.js @@ -0,0 +1,64 @@ +// Title: List All Connections +// Description: This script lists all connections in alphabetical order by name + +// Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// Import the function to
obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL + +// Define an asynchronous function to fetch and sort connections +async function listConnections() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + try { + const endpoint = `${baseURL}/connections?includeArchived=false`; + console.log(`Fetching connections from: ${endpoint}`); + + // API request to fetch connections + const response = await axios.get(endpoint, { + headers: { 'Authorization': `Bearer ${accessToken}`, 'Accept': 'application/json' }, + }); + + const connections = response.data.entries; // Access 'entries' field in response + + console.log('Raw Response:', response.data); // Debugging log to confirm data structure + + if (connections && connections.length > 0) { + // Sort connections alphabetically by name + const sortedConnections = connections.sort((a, b) => + a.name.localeCompare(b.name) + ); + + // Display sorted connections + sortedConnections.forEach((connection, index) => { + console.log(`#${index + 1}: Name: ${connection.name}, ID: ${connection.connectionId}, Type: ${connection.type}`); + }); + } else { + console.log('No connections found.'); + } + } catch (error) { + console.error('Error fetching connections:', error.message); + if (error.response) { + console.error('Response Data:', error.response.data); + } + } +} + +// Execute the function to list connections if this script is run directly +if (require.main === module) { + listConnections(); +} + +// Export the listConnections function for reuse in other modules +module.exports = listConnections; diff --git a/recipe-portal/recipes/connections/list_all_connections.md 
b/recipe-portal/recipes/connections/list_all_connections.md new file mode 100644 index 00000000..5e63badf --- /dev/null +++ b/recipe-portal/recipes/connections/list_all_connections.md @@ -0,0 +1,22 @@ +# List All Connections + +## API Endpoints Used + +- `GET /v2/connections` ā [List Connections](https://help.sigmacomputing.com/reference/listconnections) + +## Expected Output +JSON array of all connections in alphabetical order by name +Each connection includes: connectionId, name, type, status, and other metadata +Console log showing total count of connections + +## Use Cases + +- Audit all data connections in your organization +- Get connection IDs for other automation scripts +- Monitor connection health and status +- Generate reports of available data sources + +## Important Notes + +- Results are automatically sorted alphabetically by connection name +- Includes all connection types (databases, warehouses, cloud storage, etc.) \ No newline at end of file diff --git a/recipe-portal/recipes/connections/sync_schema.js b/recipe-portal/recipes/connections/sync_schema.js new file mode 100644 index 00000000..287aea16 --- /dev/null +++ b/recipe-portal/recipes/connections/sync_schema.js @@ -0,0 +1,176 @@ +// Title: Sync Schema +// Description: This script automates the synchronization of tables within a specified schema (Snowflake). + +// Required Environment Variables: +// - CONNECTIONID: The ID of the connection to sync (available from connections list) +// - SYNC_PATH: JSON array representing the database path to sync +// Examples: +// - Schema sync: ["SAMPLE_DATABASE", "PUBLIC"] +// - Database sync: ["SAMPLE_DATABASE"] +// - Table sync: ["SAMPLE_DATABASE", "PUBLIC", "TABLE_NAME"] +// +// Note: This script will discover and sync ALL tables within the specified path. +// For schema-level sync, it will sync all tables in that schema. 
+ +// Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +console.log('Environment Variables:', { + baseURL: process.env.baseURL, + CONNECTIONID: process.env.CONNECTIONID, + SYNC_PATH: process.env.SYNC_PATH, +}); + +// Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const connectionId = process.env.CONNECTIONID; // Connection ID +let syncPaths; +let bearerToken; // Global variable to store the fetched bearer token + +// Validate and parse SYNC_PATH +try { + if (!process.env.SYNC_PATH) { + throw new Error('SYNC_PATH is not defined in the .env file.'); + } + + // Parse SYNC_PATH as JSON + syncPaths = JSON.parse(process.env.SYNC_PATH); + + if (!Array.isArray(syncPaths)) { + throw new Error('SYNC_PATH must be a JSON array.'); + } +} catch (error) { + console.error('Error parsing SYNC_PATH:', error.message); + process.exit(1); +} + +// Function to initialize the bearer token +async function initializeBearerToken() { + bearerToken = await getBearerToken(); + if (!bearerToken) { + console.error('Failed to obtain Bearer token.'); + process.exit(1); + } + console.log('Bearer token initialized successfully.'); +} + +// Function to resolve the `inodeId` for a schema/folder +async function lookupInodeId(path) { + const endpoint = `${baseURL}/connection/${connectionId}/lookup`; + console.log(`Looking up inodeId for path: ${JSON.stringify(path)} at URL: ${endpoint}`); + + try { + const response = await axios.post(endpoint, { path }, { + headers: { 'Authorization': `Bearer ${bearerToken}`, 'Content-Type': 'application/json' }, + }); + + const { inodeId, kind } = response.data; + if (!inodeId || !kind) { + 
console.error(`Unexpected response: ${JSON.stringify(response.data)}`); + return null; + } + + console.log(`Resolved inodeId: ${inodeId} (kind: ${kind}) for path: ${JSON.stringify(path)}`); + return { inodeId, kind }; + } catch (error) { + console.error('Error resolving inodeId:', error.message); + if (error.response) { + console.error('Response Data:', error.response.data); + } + return null; + } +} + +// Function to list tables under a given inodeId +async function listTables(parentInodeId) { + const endpoint = `${baseURL}/files?typeFilters=table&parentId=${parentInodeId}`; + console.log(`Fetching tables for parentInodeId: ${parentInodeId} at URL: ${endpoint}`); + + try { + const response = await axios.get(endpoint, { + headers: { 'Authorization': `Bearer ${bearerToken}`, 'Accept': 'application/json' }, + }); + + const tables = response.data.entries || []; + console.log(`Found ${tables.length} tables under inodeId: ${parentInodeId}`); + tables.forEach((table) => { + console.log(`Table Name: ${table.name}, Table ID: ${table.id}`); + }); + + return tables.map((table) => ({ + id: table.id, + name: table.name, // Include table name for path construction + })); + } catch (error) { + console.error(`Error listing tables for inodeId: ${parentInodeId}`, error.message); + if (error.response) { + console.error('Response Data:', error.response.data); + } + return []; + } +} + +// Function to sync a specific table using its inodeId and full path +async function syncTable(inodeId, fullPath) { + const endpoint = `${baseURL}/connections/${connectionId}/sync`; + + const payload = { + path: fullPath, // Send the full path including the table name + }; + + try { + console.log(`Starting sync for table with path: ${JSON.stringify(fullPath)}`); + const response = await axios.post(endpoint, payload, { + headers: { 'Authorization': `Bearer ${bearerToken}`, 'Content-Type': 'application/json' }, + }); + + console.log(`Sync completed for table with path: ${JSON.stringify(fullPath)}`); + 
console.log('Response:', response.data); + } catch (error) { + console.error(`Error syncing table with inodeId: ${inodeId}`, error.message); + if (error.response) { + console.error('Response Data:', error.response.data); + } + } +} + +// Main function to list and sync tables +async function syncAllTables() { + console.log(`Starting sync for path: ${JSON.stringify(syncPaths)}`); + + // Step 1: Resolve inodeId for the sync path + const { inodeId } = await lookupInodeId(syncPaths); + if (!inodeId) { + console.error('Failed to resolve inodeId for path.'); + return; + } + + // Step 2: List tables under the resolved inodeId + const tables = await listTables(inodeId); + if (tables.length === 0) { + console.log('No tables found to sync.'); + return; + } + + console.log(`Found ${tables.length} tables to sync.`); + + // Step 3: Sync each table + for (const table of tables) { + const fullPath = [...syncPaths, table.name]; // Append table name to sync path + await syncTable(table.id, fullPath); + } +} + +// Execute the function if this script is run directly +if (require.main === module) { + (async () => { + await initializeBearerToken(); // Fetch the bearer token once + await syncAllTables(); + })(); +} diff --git a/recipe-portal/recipes/connections/sync_schema.md b/recipe-portal/recipes/connections/sync_schema.md new file mode 100644 index 00000000..05ffab4e --- /dev/null +++ b/recipe-portal/recipes/connections/sync_schema.md @@ -0,0 +1,27 @@ +# Sync Schema + +## API Endpoints Used + +- `POST /v2/connections/{connectionId}/lookup` ā [Lookup Connection Path](https://help.sigmacomputing.com/reference/lookupconnectionpath) +- `GET /v2/files` ā [List Files](https://help.sigmacomputing.com/reference/listfiles) +- `POST /v2/connections/{connectionId}/sync` ā [Sync Connection](https://help.sigmacomputing.com/reference/syncconnection) + +## Expected Output + +- Console log showing schema lookup results +- List of tables found within the specified schema +- Sync status for each table 
processed +- Success/failure confirmation for each table sync operation + +## Use Cases + +- Automatically sync new tables added to your data warehouse +- Refresh schema after structural changes in Snowflake +- Bulk synchronization of multiple tables in a schema +- Maintain up-to-date data source metadata + +## Important Notes + +- SYNC_PATH must be a valid JSON array representing the schema path +- Process can take time depending on number of tables and schema complexity +- Each table is synced individually with status reporting \ No newline at end of file diff --git a/recipe-portal/recipes/embedding/generate_workbook_embed_path.js b/recipe-portal/recipes/embedding/generate_workbook_embed_path.js new file mode 100644 index 00000000..8195b6c4 --- /dev/null +++ b/recipe-portal/recipes/embedding/generate_workbook_embed_path.js @@ -0,0 +1,92 @@ +// Title: Generate Workbook Embed Path +// Description: This script generates secure embed URLs for Sigma workbooks using the official embed API. + +// Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Base URL for the Sigma API +const workbookId = process.env.WORKBOOK_ID; // The workbook ID to generate embed URL for +const memberId = process.env.MEMBERID; // Member ID for user-specific embedding + +// Generate embed URL for a specific workbook +async function generateEmbedURL(workbookId, memberId, accessToken) { + const url = `${baseURL}/workbooks/${workbookId}/embeds`; + console.log(`Generating embed URL for workbook: ${workbookId}`); + + try { + const embedPayload = { + embedType: "secure", // Options: "secure" | "public" | 
"application" + sourceType: "workbook", // Options: "workbook" | "page" | "element" + sourceId: workbookId, // The workbook ID to embed + memberId: memberId, // Member ID for user-specific embedding + // Add any additional embed options here as needed + }; + + const response = await axios.post(url, embedPayload, { + headers: { + 'Authorization': `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + }); + + console.log('Embed URL generated successfully:'); + console.log('Response:', JSON.stringify(response.data, null, 2)); + + return response.data; + } catch (error) { + console.error(`Error generating embed URL: ${error}`); + if (error.response) { + console.error(`Response status: ${error.response.status}`); + console.error(`Response headers: ${JSON.stringify(error.response.headers)}`); + console.error(`Response body: ${JSON.stringify(error.response.data, null, 2)}`); + } else { + console.error(`Error details: ${error.message}`); + } + return null; + } +} + +// Main function to manage the overall workflow +async function main() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + if (!workbookId) { + console.error('WORKBOOK_ID is required to generate embed URL.'); + return; + } + + if (!memberId) { + console.error('MEMBERID is required for user-specific embedding.'); + return; + } + + console.log(`Generating embed URL for workbook: ${workbookId}`); + console.log(`Member ID: ${memberId}`); + + const embedResult = await generateEmbedURL(workbookId, memberId, accessToken); + if (embedResult) { + console.log('\nā Embed URL generation completed successfully!'); + if (embedResult.url) { + console.log(`š Embed URL: ${embedResult.url}`); + } + } else { + console.error('ā Failed to generate embed URL.'); + } +} + +if (require.main === module) { + main(); // Executes the main function if the file is run directly +} + +module.exports = main; // Exports the main function to allow 
it to be used in other modules \ No newline at end of file diff --git a/recipe-portal/recipes/embedding/generate_workbook_embed_path.md b/recipe-portal/recipes/embedding/generate_workbook_embed_path.md new file mode 100644 index 00000000..7f572313 --- /dev/null +++ b/recipe-portal/recipes/embedding/generate_workbook_embed_path.md @@ -0,0 +1,25 @@ +# Generate Workbook Embed Path + +## API Endpoints Used + +- `POST /v2/workbooks/{workbookId}/embed` ā [Create Workbook Embed](https://help.sigmacomputing.com/reference/createworkbookembed) + +## Expected Output + +- Generated embed URL for the specified workbook +- URL parameters for member-specific embedding +- Embed path ready for integration into applications + +## Use Cases + +- Generate secure embed URLs for external applications +- Create member-specific embedded analytics experiences +- Integrate Sigma workbooks into custom applications +- Build embedded analytics solutions with user context + +## Important Notes + +- Requires valid MEMBERID for user-specific embedding +- Generated URLs include security parameters for authentication +- Embed URLs are time-limited for security purposes +- Essential for embedded analytics implementations \ No newline at end of file diff --git a/recipe-portal/recipes/get-access-token.js b/recipe-portal/recipes/get-access-token.js new file mode 100644 index 00000000..099862ea --- /dev/null +++ b/recipe-portal/recipes/get-access-token.js @@ -0,0 +1,220 @@ +// This script responds with a bearer token using the encrypted authentication system +// It integrates with the portal's encrypted credential storage instead of using .env files + +const fs = require('fs'); +const path = require('path'); +const os = require('os'); +const crypto = require('crypto'); +const axios = require('axios'); + +// Constants for encrypted credential storage (matching keyStorage.ts) +const ALGORITHM = 'aes-256-cbc'; +const KEY_LENGTH = 32; + +/** + * Generate machine-specific encryption key (matches 
keyStorage.ts) + */ +function getMachineKey() { + const machineInfo = [ + os.hostname(), + os.platform(), + os.arch(), + os.type() + ].join('|'); + + return crypto.scryptSync(machineInfo, 'sigma-portal-salt', KEY_LENGTH); +} + +/** + * Get the sigma portal config directory (matches keyStorage.ts) + */ +function getKeysDirectory() { + const platform = os.platform(); + let configDir; + + if (platform === 'win32') { + configDir = process.env.APPDATA || os.tmpdir(); + } else if (platform === 'darwin') { + configDir = path.join(os.homedir(), 'Library', 'Application Support'); + } else { + configDir = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), '.config'); + } + + return path.join(configDir, '.sigma-portal'); +} + +/** + * Decrypt stored credentials (matches keyStorage.ts) + */ +function decryptCredentials(encryptedData) { + try { + const key = getMachineKey(); + const { iv, encrypted } = JSON.parse(encryptedData); + + const decipher = crypto.createDecipheriv(ALGORITHM, key, Buffer.from(iv, 'hex')); + + let decrypted = decipher.update(encrypted, 'hex', 'utf8'); + decrypted += decipher.final('utf8'); + + const parsed = JSON.parse(decrypted); + + return { + clientId: parsed.clientId, + clientSecret: parsed.clientSecret, + baseURL: parsed.baseURL || 'https://aws-api.sigmacomputing.com/v2', + authURL: parsed.authURL || 'https://aws-api.sigmacomputing.com/v2/auth/token' + }; + } catch (error) { + console.error('Failed to decrypt credentials:', error); + return null; + } +} + +/** + * Get stored credentials from encrypted storage + */ +function getStoredCredentials(configName) { + try { + const keysFile = path.join(getKeysDirectory(), 'encrypted-keys.json'); + + if (!fs.existsSync(keysFile)) { + return null; + } + + const allCredentials = JSON.parse(fs.readFileSync(keysFile, 'utf-8')); + + // Use provided name, or default, or first available + let targetName = configName; + if (!targetName) { + targetName = allCredentials._metadata?.defaultSet || 
Object.keys(allCredentials).find(k => k !== '_metadata'); + } + + if (!targetName || !allCredentials[targetName]) { + return null; + } + + return decryptCredentials(allCredentials[targetName].encrypted); + } catch (error) { + console.error('Failed to retrieve stored credentials:', error); + return null; + } +} + +/** + * Check for cached valid token + */ +function getCachedToken(clientId) { + try { + const tempDir = os.tmpdir(); + const configHash = clientId ? clientId.substring(0, 8) : 'default'; + const tokenFile = path.join(tempDir, `sigma-portal-token-${configHash}.json`); + + if (!fs.existsSync(tokenFile)) { + return null; + } + + const tokenData = JSON.parse(fs.readFileSync(tokenFile, 'utf8')); + const now = Date.now(); + + // Check if token is still valid + if (tokenData.expiresAt && now < tokenData.expiresAt) { + // Update last accessed time + tokenData.lastAccessed = Date.now(); + fs.writeFileSync(tokenFile, JSON.stringify(tokenData)); + + return tokenData.token; + } else { + // Remove expired token + fs.unlinkSync(tokenFile); + return null; + } + } catch (error) { + return null; + } +} + +/** + * Cache a new token + */ +function cacheToken(token, clientId, expiresIn = 3600) { + try { + const tempDir = os.tmpdir(); + const configHash = clientId ? 
clientId.substring(0, 8) : 'default'; + const tokenFile = path.join(tempDir, `sigma-portal-token-${configHash}.json`); + + const tokenData = { + token: token, + clientId: clientId, + createdAt: Date.now(), + lastAccessed: Date.now(), + expiresAt: Date.now() + (expiresIn * 1000) // Convert to milliseconds + }; + + fs.writeFileSync(tokenFile, JSON.stringify(tokenData)); + } catch (error) { + console.warn('Failed to cache token:', error); + } +} + +/** + * Get bearer token using new authentication system + */ +async function getBearerToken(configName) { + try { + // Get stored credentials + const credentials = getStoredCredentials(configName); + + if (!credentials) { + throw new Error('No authentication configuration found. Please use the portal to set up authentication first.'); + } + + // Check for cached valid token first + const cachedToken = getCachedToken(credentials.clientId); + if (cachedToken) { + console.log('Using cached authentication token.'); + return cachedToken; + } + + console.log('No cached token found. Script will authenticate normally.'); + + // Request new token + const requestData = new URLSearchParams({ + grant_type: 'client_credentials', + client_id: credentials.clientId, + client_secret: credentials.clientSecret, + }); + + console.log(`URL sent to Sigma: ${credentials.authURL}`); + + const response = await axios.post(credentials.authURL, requestData, { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + }); + + const token = response.data.access_token; + const expiresIn = response.data.expires_in || 3600; + + console.log('Bearer token obtained successfully:', response.data.access_token); + + // Cache the new token + cacheToken(token, credentials.clientId, expiresIn); + + return token; + } catch (error) { + console.error('Error obtaining Bearer token:', error.response ? 
error.response.data : error.message); + return null; + } +} + +// Check if this script is being run directly +if (require.main === module) { + getBearerToken().then(token => { + console.log('Token acquired:', token); + }).catch(error => { + console.error('Failed to acquire token:', error); + }); +} + +// Export the getBearerToken function +module.exports = getBearerToken; \ No newline at end of file diff --git a/recipe-portal/recipes/launch.json b/recipe-portal/recipes/launch.json new file mode 100644 index 00000000..0b4c4ef8 --- /dev/null +++ b/recipe-portal/recipes/launch.json @@ -0,0 +1,13 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Debug authenticate-bearer", + "program": "${workspaceFolder}/sigma-api-recipes/authenticate-bearer.js", + "console": "integratedTerminal" + } + ] + } + \ No newline at end of file diff --git a/recipe-portal/recipes/members/bulk-create-members.js b/recipe-portal/recipes/members/bulk-create-members.js new file mode 100644 index 00000000..bf1b92bf --- /dev/null +++ b/recipe-portal/recipes/members/bulk-create-members.js @@ -0,0 +1,183 @@ +// Title: Bulk Create Members +// Description: This script creates multiple new members in Sigma from a list of emails, with configurable member type and duplicate checking. 
+ +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2: Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); +const fs = require('fs'); +const path = require('path'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const defaultMemberType = process.env.NEW_MEMBER_TYPE || 'view'; // Default member type for all created members +const emailListPath = path.join(__dirname, '..', '.member-emails'); // Path to the file containing member emails + +// Function to check if a member already exists +async function memberExists(email, accessToken) { + const requestURL = `${baseURL}/members?search=${encodeURIComponent(email)}`; + try { + const response = await axios.get(requestURL, { + headers: { + 'Authorization': `Bearer ${accessToken}`, + 'Accept': 'application/json', + } + }); + + // Check if any member in the results matches the email exactly + const members = response.data.entries || []; + const exists = members.some(member => member.email.toLowerCase() === email.toLowerCase()); + return exists; + } catch (error) { + console.error(`Error checking if member exists (${email}):`, error.response ? 
error.response.data : error.message); + return false; // Assume doesn't exist if we can't check + } +} + +// Function to create a single member +async function createMember(email, memberType, accessToken) { + // Extract first and last name from email (fallback approach) + const emailUsername = email.split('@')[0]; + const nameParts = emailUsername.split(/[._-]/); + const firstName = nameParts[0] || 'User'; + const lastName = nameParts[1] || 'Name'; + + const requestURL = `${baseURL}/members`; + + try { + const response = await axios.post(requestURL, { + email: email, + firstName: firstName.charAt(0).toUpperCase() + firstName.slice(1), // Capitalize first letter + lastName: lastName.charAt(0).toUpperCase() + lastName.slice(1), // Capitalize first letter + memberType: memberType + }, { + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${accessToken}` + } + }); + + return { + success: true, + memberId: response.data.memberId, + email: email, + memberType: response.data.memberType + }; + } catch (error) { + return { + success: false, + email: email, + error: error.response ? 
error.response.data : error.message + }; + } +} + +// Main function to process bulk member creation +async function bulkCreateMembers() { + console.log('Starting bulk member creation process...'); + + // Get access token + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + // Check if email file exists + if (!fs.existsSync(emailListPath)) { + console.error(`Email list file not found: ${emailListPath}`); + console.error('Please create a .member-emails file in the recipes directory with comma-separated email addresses.'); + return; + } + + // Read and parse email list + let emails; + try { + const emailContent = fs.readFileSync(emailListPath, 'utf-8'); + emails = emailContent.split(',').map(email => email.trim()).filter(email => email.length > 0); + } catch (error) { + console.error('Error reading email list file:', error.message); + return; + } + + if (emails.length === 0) { + console.error('No emails found in the email list file.'); + return; + } + + console.log(`Found ${emails.length} email(s) to process:`); + console.log(`Default member type: ${defaultMemberType}`); + console.log(''); + + // Process each email + const results = { + created: [], + skipped: [], + failed: [] + }; + + for (let i = 0; i < emails.length; i++) { + const email = emails[i]; + console.log(`Processing ${i + 1}/${emails.length}: ${email}`); + + // Check if member already exists + const exists = await memberExists(email, accessToken); + if (exists) { + console.log(` Skipped: Member already exists`); + results.skipped.push({ email, reason: 'Already exists' }); + continue; + } + + // Create the member + const result = await createMember(email, defaultMemberType, accessToken); + if (result.success) { + console.log(` Created: Member ID ${result.memberId}`); + results.created.push(result); + } else { + console.log(` Failed: ${typeof result.error === 'object' ? 
JSON.stringify(result.error) : result.error}`); + results.failed.push(result); + } + + // Small delay to avoid rate limiting + await new Promise(resolve => setTimeout(resolve, 500)); + } + + // Print summary + console.log(''); + console.log('=== BULK MEMBER CREATION SUMMARY ==='); + console.log(`Total processed: ${emails.length}`); + console.log(`Successfully created: ${results.created.length}`); + console.log(`Skipped (already exist): ${results.skipped.length}`); + console.log(`Failed: ${results.failed.length}`); + + if (results.created.length > 0) { + console.log(''); + console.log('Created members:'); + results.created.forEach(member => { + console.log(` ${member.email} ā ${member.memberId} (${member.memberType})`); + }); + } + + if (results.failed.length > 0) { + console.log(''); + console.log('Failed members:'); + results.failed.forEach(failure => { + console.log(` ${failure.email} ā Error: ${typeof failure.error === 'object' ? JSON.stringify(failure.error) : failure.error}`); + }); + } + + console.log(''); + console.log('Bulk member creation completed.'); +} + +// Execute the function if this script is run directly +if (require.main === module) { + bulkCreateMembers(); +} + +// Export the function for reuse +module.exports = bulkCreateMembers; \ No newline at end of file diff --git a/recipe-portal/recipes/members/bulk-create-members.md b/recipe-portal/recipes/members/bulk-create-members.md new file mode 100644 index 00000000..365261b5 --- /dev/null +++ b/recipe-portal/recipes/members/bulk-create-members.md @@ -0,0 +1,73 @@ +# Bulk Create Members + +This script creates multiple new members in Sigma from a list of email addresses. + +## Prerequisites + +- Valid authentication credentials with member management permissions +- Email list file containing the email addresses of members to create + +## Setup + +1. **Create email list file**: Create a file named `.member-emails` in the `/recipes` directory +2. 
**Format the file**: Add email addresses separated by commas: + ``` + user1@company.com,user2@company.com,user3@company.com + ``` + +## Environment Variables + +| Variable | Required | Default | Description | +|----------|----------|---------|-------------| +| `NEW_MEMBER_TYPE` | No | `view` | Default account type for all created members (`admin`, `build`, `view`, `analyze`, `act`) | + +## Features + +- **Duplicate Prevention**: Automatically checks if members already exist and skips them +- **Smart Naming**: Extracts first/last names from email addresses (e.g., `john.doe@company.com` ā `John Doe`) +- **Batch Processing**: Processes multiple emails with rate limiting to avoid API throttling +- **Detailed Reporting**: Provides summary of created, skipped, and failed members +- **Error Handling**: Continues processing even if individual member creation fails + +## Usage + +1. Set up your `.member-emails` file with comma-separated email addresses +2. Configure `NEW_MEMBER_TYPE` in your `.env` file (optional, defaults to `view`) +3. 
Run the script + +## Output + +The script provides detailed progress for each email and a final summary: + +``` +Processing 1/3: user1@company.com + Created: Member ID abc123xyz +Processing 2/3: user2@company.com + Skipped: Member already exists +Processing 3/3: user3@company.com + Failed: Invalid email format + +=== BULK MEMBER CREATION SUMMARY === +Total processed: 3 +Successfully created: 1 +Skipped (already exist): 1 +Failed: 1 +``` + +## Common Use Cases + +- **Initial Setup**: Create multiple team members during organization setup +- **Team Onboarding**: Add new team members in bulk +- **Testing**: Create test users for team assignment workflows + +## Related Scripts + +After creating members in bulk, you can use: +- `bulk-assign-team.js` - Assign the newly created members to teams +- `master-script.js` - Complete individual member onboarding with workspace and connection permissions + +## Notes + +- Members are created with basic information derived from their email addresses +- For more detailed member setup (custom names, individual permissions), use the individual member creation scripts +- The script uses the same `.member-emails` file as the team bulk assignment script for consistency \ No newline at end of file diff --git a/recipe-portal/recipes/members/bulk-deactivate.js b/recipe-portal/recipes/members/bulk-deactivate.js new file mode 100644 index 00000000..ff4d39c6 --- /dev/null +++ b/recipe-portal/recipes/members/bulk-deactivate.js @@ -0,0 +1,115 @@ +// Title: Bulk Deactivate +// Description: This script identifies users in Sigma matching a specified name pattern, retrieves their status, and deactivates (soft-deletes) them. 
+ +// Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Base URL for the Sigma API +const userNamePattern = new RegExp(process.env.USER_NAME_PATTERN, 'i'); // Regex pattern for user names (case-insensitive) +const dryRun = process.env.DRY_RUN === 'true'; // Boolean flag for dry run mode (preview without actual deactivation) + +// Function to fetch the isInactive status of a user by memberId +async function fetchIsInactiveStatus(memberId, accessToken) { + const url = `${baseURL}/members/${memberId}`; // API endpoint to get member details + console.log(`Fetching isInactive status for memberId: ${memberId} at URL: ${url}`); + try { + const response = await axios.get(url, { + headers: { + Authorization: `Bearer ${accessToken}`, + }, + }); + return response.data.isInactive || false; // Return isInactive status or false if not set + } catch (error) { + console.error(`Error fetching isInactive status for memberId: ${memberId}`, error.response ? 
error.response.data : error); + return false; // Default to false if an error occurs + } +} + +// Function to list all members and filter by name using the regex pattern +async function listUsers(accessToken) { + const url = `${baseURL}/members?includeInactive=true`; // Include inactive users in the list + console.log(`Listing users from: ${url}`); + try { + const response = await axios.get(url, { + headers: { + Authorization: `Bearer ${accessToken}`, + }, + }); + + // Filter users by name (firstName + lastName) using regex pattern + const filteredUsers = response.data.entries.filter(user => + userNamePattern.test(`${user.firstName} ${user.lastName}`) + ); + + // Fetch isInactive status for each filtered user + const usersWithStatus = []; + for (const user of filteredUsers) { + const isInactive = await fetchIsInactiveStatus(user.memberId, accessToken); + if (!isInactive) { + usersWithStatus.push({ ...user, isInactive }); + } + } + + console.log('Filtered Users with isInactive Status:', usersWithStatus); + return usersWithStatus; + } catch (error) { + console.error(`Error fetching users: ${error}`); + return []; + } +} + +// Function to "soft-delete" (deactivate) a member +async function deleteUser(user, accessToken) { + try { + const url = `${baseURL}/members/${user.memberId}`; + const response = await axios.delete(url, { + headers: { + Authorization: `Bearer ${accessToken}`, + Accept: 'application/json', + }, + }); + console.log(`Deactivated (deleted) user: ${user.firstName} ${user.lastName} (ID: ${user.memberId})`); + console.log('Response:', response.data); + } catch (error) { + console.error(`Error deactivating (deleting) user ${user.firstName} ${user.lastName}:`, error.response ? 
error.response.data : error); + } +} + +// Main function to manage the overall workflow +async function main() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + const users = await listUsers(accessToken); + if (users.length > 0) { + console.log(`Found ${users.length} active user(s) matching the pattern.`); + + if (dryRun) { + console.log('š DRY RUN MODE: Preview of users that would be deactivated:'); + for (const user of users) { + console.log(`- Would deactivate: ${user.firstName} ${user.lastName} (${user.email}) [ID: ${user.memberId}]`); + } + console.log('No actual changes made. Set DRY_RUN=false to perform actual deactivation.'); + } else { + console.log('š LIVE MODE: Deactivating matched users...'); + for (const user of users) { + await deleteUser(user, accessToken); // Actually deactivate the user + } + } + } else { + console.log('No active users matched the specified pattern.'); + } +} + +// Execute the main function +main(); diff --git a/recipe-portal/recipes/members/bulk-deactivate.md b/recipe-portal/recipes/members/bulk-deactivate.md new file mode 100644 index 00000000..186c3745 --- /dev/null +++ b/recipe-portal/recipes/members/bulk-deactivate.md @@ -0,0 +1,25 @@ +# Bulk Deactivate + +## API Endpoints Used + +- `GET /v2/members` ā [List Members](https://help.sigmacomputing.com/reference/listmembers) +- `DELETE /v2/members/{memberId}` ā [Deactivate Member](https://help.sigmacomputing.com/reference/deletemember) + +## Expected Output + +- List of users matching the specified name pattern +- Deactivation status for each matched user +- Summary of total users processed and deactivated + +## Use Cases + +- Remove multiple test accounts with similar naming patterns +- Bulk deactivate users from specific departments +- Clean up accounts based on naming conventions +- Automated user lifecycle management + +## Important Notes + +- ā ļø Uses pattern matching - review matches 
carefully before proceeding +- Performs soft deletion (deactivation) - users cannot access but data remains +- Set DRY_RUN=true for preview mode to see matches without actually deactivating users \ No newline at end of file diff --git a/recipe-portal/recipes/members/create-connection-permission.js b/recipe-portal/recipes/members/create-connection-permission.js new file mode 100644 index 00000000..680dc6e1 --- /dev/null +++ b/recipe-portal/recipes/members/create-connection-permission.js @@ -0,0 +1,52 @@ +// Title: Create Connection Permission +// Description: This script provides permission to a connection, using the connectionId from .env. + +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2" Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const memberId = process.env.MEMBERID; // Retrieve the memberId, from .env +const connectionId = process.env.CONNECTIONID; // Retrieve the connectionId from .env + +async function addNewConnectionPermission() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + const requestURL = `${baseURL}/connections/${connectionId}/grants`; // Corrected URL structure + console.log(`URL sent to Sigma: ${requestURL}`); + + try { + // The 'grantee' structure might need adjustment based on Sigma's expected payload. 
+ const response = await axios.post(requestURL, { + grants: [{ + grantee: { memberId: memberId }, // Specifies the member receiving the permission + permission: 'usage' // Using "usage" since we created this user with as a Viewer in earlier steps + }] + }, { + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${accessToken}` + } + }); + + console.log('New connection permission added successfully:', JSON.stringify(response.data, null, 2)); + } catch (error) { + console.error('Error adding new connection permission:', error.response ? error.response.data : error.message); + } +} + +if (require.main === module) { + addNewConnectionPermission(); +} + +module.exports = addNewConnectionPermission; diff --git a/recipe-portal/recipes/members/create-connection-permission.md b/recipe-portal/recipes/members/create-connection-permission.md new file mode 100644 index 00000000..43bc33c0 --- /dev/null +++ b/recipe-portal/recipes/members/create-connection-permission.md @@ -0,0 +1,24 @@ +# Create Connection Permission + +## API Endpoints Used + +- `POST /v2/connections/{connectionId}/grants` ā [Create Connection Grant](https://help.sigmacomputing.com/reference/createconnectiongrant) + +## Expected Output + +- Confirmation of connection permission grant +- Permission details including access level and connection information +- Member and connection summary + +## Use Cases + +- Grant data source access to specific users +- Control who can use particular database connections +- Manage data security and access governance +- Set up connection-level permissions for compliance + +## Important Notes + +- Requires valid MEMBERID and CONNECTIONID in environment variables +- Connection grants control access to underlying data sources +- Essential for data governance and security compliance \ No newline at end of file diff --git a/recipe-portal/recipes/members/create-new.js b/recipe-portal/recipes/members/create-new.js new file mode 100644 index 00000000..89ef49f3 --- 
/dev/null +++ b/recipe-portal/recipes/members/create-new.js @@ -0,0 +1,112 @@ +// Title: Create New Member +// Description: This script creates a new member in Sigma after ensuring the email does not already exist. + +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2: Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const baseEmail = process.env.EMAIL; // Retrieve the base email from environment variables + +// Dynamically generate a unique email using the base email in the format: baseEmail+mmddhhmm@sigmacomputing.com +const now = new Date(); +const timestamp = `${String(now.getMonth() + 1).padStart(2, '0')}${String(now.getDate()).padStart(2, '0')}${String(now.getHours()).padStart(2, '0')}${String(now.getMinutes()).padStart(2, '0')}`; +const newMemberEmail = `${baseEmail.split('@')[0]}+${timestamp}@${baseEmail.split('@')[1]}`; +console.log(`Generated email for new member: ${newMemberEmail}`); + +// Load additional member details from the environment variables +const newMemberFirstName = process.env.NEW_MEMBER_FIRST_NAME; +const newMemberLastName = process.env.NEW_MEMBER_LAST_NAME; +const newMemberType = process.env.NEW_MEMBER_TYPE; + +async function memberExists(email, accessToken) { + const requestURL = `${baseURL}/members?search=${encodeURIComponent(email)}`; + console.log(`Checking if member exists with search parameter: ${email}`); + try { + const response = await axios.get(requestURL, { + headers: { + 'Authorization': `Bearer ${accessToken}`, + 'Accept': 'application/json', + } + }); + + // Log the full response for debugging + console.log('Response data:', 
JSON.stringify(response.data, null, 2)); + + // Check if any member in the results matches the email exactly + const members = response.data.entries || []; + const exists = members.some(member => member.email.toLowerCase() === email.toLowerCase()); + + console.log(`Member check result: ${exists ? 'Exists' : 'Does not exist'}`); + return exists; + } catch (error) { + console.error('Error checking member existence:', error.response ? error.response.data : error.message); + throw new Error('Failed to check member existence.'); + } +} + +// Function to create a new member +async function addNewMember() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + // Log the environment variables to validate inputs + console.log(`New member details: + Email: ${newMemberEmail} + First Name: ${newMemberFirstName} + Last Name: ${newMemberLastName} + Member Type: ${newMemberType}`); + + // Check if the member already exists + const exists = await memberExists(newMemberEmail, accessToken); + if (exists) { + console.log(`Member with email ${newMemberEmail} already exists. 
No action taken.`); + return; + } + + const requestURL = `${baseURL}/members`; + console.log(`URL sent to Sigma: ${requestURL}`); + + try { + // Make the API request to create the new member + const response = await axios.post(requestURL, { + email: newMemberEmail, + firstName: newMemberFirstName, + lastName: newMemberLastName, + memberType: newMemberType, // Ensure this is passed correctly + }, { + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${accessToken}` + } + }); + + // Log the successful response + const { memberId, memberType: createdMemberType } = response.data; + console.log('New member added successfully:'); + console.log(`Member ID: ${memberId}`); + console.log(`Account Type: ${createdMemberType}`); + return { memberId, email: newMemberEmail, memberType: createdMemberType }; // Return member info for master script + } catch (error) { + // Handle errors and log details + console.error('Error adding new member:', error.response ? error.response.data : error.message); + return null; + } +} + +// Execute the function if this script is run directly +if (require.main === module) { + addNewMember(); +} + +// Export the function for reuse +module.exports = addNewMember; \ No newline at end of file diff --git a/recipe-portal/recipes/members/create-new.md b/recipe-portal/recipes/members/create-new.md new file mode 100644 index 00000000..14a966f5 --- /dev/null +++ b/recipe-portal/recipes/members/create-new.md @@ -0,0 +1,28 @@ +# Create New Member + +## API Endpoints Used + +- `GET /v2/members?search={email}` ā [List Members](https://help.sigmacomputing.com/reference/listmembers) +- `GET /v2/accountTypes` ā [List Account Types](https://help.sigmacomputing.com/reference/listaccounttypes) +- `POST /v2/members` ā [Create Member](https://help.sigmacomputing.com/reference/createmember) + +## Expected Output + +- Email verification check results +- New member creation confirmation with generated memberId +- Complete user profile of newly 
created member + +## Use Cases + +- Onboard new employees to Sigma +- Programmatically create user accounts +- Bulk user provisioning workflows +- Integration with HR systems + +## Important Notes + +- Script first verifies email doesn't already exist to prevent duplicates +- Member type dropdown dynamically loads available account types from your Sigma organization +- Requires complete user information: email, first name, last name, account type +- Email is automatically generated with timestamp to ensure uniqueness +- Returns the new memberId for use in subsequent operations \ No newline at end of file diff --git a/recipe-portal/recipes/members/create-workspace-permission.js b/recipe-portal/recipes/members/create-workspace-permission.js new file mode 100644 index 00000000..50eb9372 --- /dev/null +++ b/recipe-portal/recipes/members/create-workspace-permission.js @@ -0,0 +1,94 @@ +// Title: Create Workspace Permission +// Description: This script grants workspace permissions to either a member or team, with configurable permission levels (view, edit, admin). 
+ +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2" Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const memberId = process.env.MEMBERID; // Retrieve the memberId, from .env (optional - use either memberId or teamId) +const teamId = process.env.TEAMID; // Retrieve the teamId, from .env (optional - use either memberId or teamId) +const workspaceId = process.env.WORKSPACEID; // Retrieve the WorkspaceID from .env +// Map common permission names to valid Sigma workspace permissions +const rawPermission = process.env.PERMISSION || 'view'; +const permissionMapping = { + 'view': 'view', + 'explore': 'explore', + 'organize': 'organize', + 'edit': 'edit', + 'manage': 'edit', // Map 'manage' to 'edit' (highest available) + 'admin': 'edit', // Map 'admin' to 'edit' (highest available) + 'full': 'edit' // Map 'full' to 'edit' (highest available) +}; + +const permission = permissionMapping[rawPermission.toLowerCase()] || 'view'; + +async function addNewWorkspacePermission() { + // Validate the final permission value + const validPermissions = ['view', 'explore', 'organize', 'edit']; + if (!validPermissions.includes(permission)) { + console.error(`Error: Invalid permission "${rawPermission}". 
Valid options: ${validPermissions.join(', ')}`); + return; + } + + // Log permission mapping if different + if (rawPermission.toLowerCase() !== permission) { + console.log(`Permission mapped: "${rawPermission}" ā "${permission}"`); + } + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + const requestURL = `${baseURL}/workspaces/${workspaceId}/grants`; + console.log(`URL sent to Sigma: ${requestURL}`); + + // Validate that either memberId or teamId is provided (but not both) + if (!memberId && !teamId) { + console.error('Error: Either MEMBERID or TEAMID must be provided in environment variables'); + return; + } + + if (memberId && teamId) { + console.error('Error: Cannot specify both MEMBERID and TEAMID. Choose one.'); + return; + } + + // Build grantee object based on provided ID + const grantee = memberId ? { memberId } : { teamId }; + const granteeType = memberId ? 'member' : 'team'; + const granteeId = memberId || teamId; + + console.log(`Granting ${permission} permission to ${granteeType}: ${granteeId}`); + + try { + const response = await axios.post(requestURL, { + grants: [{ + grantee, + permission + }] + }, { + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${accessToken}` + } + }); + + console.log('New workspace permission added successfully:', JSON.stringify(response.data, null, 2)); + } catch (error) { + console.error('Error adding new workspace permission:', error.response ? 
error.response.data : error.message); + } +} + +if (require.main === module) { + addNewWorkspacePermission(); +} + +module.exports = addNewWorkspacePermission; diff --git a/recipe-portal/recipes/members/create-workspace-permission.md b/recipe-portal/recipes/members/create-workspace-permission.md new file mode 100644 index 00000000..4cbb6780 --- /dev/null +++ b/recipe-portal/recipes/members/create-workspace-permission.md @@ -0,0 +1,28 @@ +# Create Workspace Permission + +## API Endpoints Used + +- `POST /v2/workspaces/{workspaceId}/grants` ā [Create Workspace Grant](https://help.sigmacomputing.com/reference/createworkspacegrant) + +## Expected Output + +- Confirmation of permission grant creation +- Permission details including access level and scope +- Member and workspace information summary + +## Use Cases + +- Grant workspace access to specific users or teams +- Set up user/team permissions for project collaboration +- Manage workspace-level security and access control +- Establish content sharing permissions with different access levels + +## Important Notes + +- **Grantee Options**: Provide either MEMBERID (for individual users) or TEAMID (for teams) - not both +- **Permission Levels**: + - `view` - Read-only access to workspace content + - `edit` - Can modify and create content in workspace + - `admin` - Full administrative access including user management +- **Required Parameters**: WORKSPACEID and PERMISSION must be specified +- Workspace grants control access to all content within the workspace \ No newline at end of file diff --git a/recipe-portal/recipes/members/create-workspace.js b/recipe-portal/recipes/members/create-workspace.js new file mode 100644 index 00000000..69076e53 --- /dev/null +++ b/recipe-portal/recipes/members/create-workspace.js @@ -0,0 +1,100 @@ +// Title: Create Workspace +// Description: This script creates a new workspace, named using the memberId from .env. 
+ +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2" Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const memberId = process.env.MEMBERID; // Retrieve the memberId to create a workspace for +const workspaceName = process.env.WORKSPACE_NAME || `Workspace for Member ${process.env.MEMBERID}`; // Custom name or default +const noDuplicates = process.env.NO_DUPLICATES !== 'false'; // Default to true unless explicitly set to false + +async function addNewWorkspace() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + const requestURL = `${baseURL}/workspaces`; + console.log(`URL sent to Sigma: ${requestURL}`); + console.log(`Creating workspace: "${workspaceName}" with noDuplicates: ${noDuplicates}`); + + try { + const response = await axios.post(requestURL, { + name: workspaceName, + noDuplicates: noDuplicates + }, { + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${accessToken}` + } + }); + + // Handle both new workspace creation and existing workspace (duplicate prevention) + let workspaceId = null; + let isNewWorkspace = false; + + if (response.data && response.data.workspaceId) { + // New workspace created + workspaceId = response.data.workspaceId; + isNewWorkspace = true; + } else if (response.data && response.data.workspaceIds && response.data.workspaceIds.length > 0) { + // Existing workspace found (duplicate prevention) + workspaceId = response.data.workspaceIds[0]; + isNewWorkspace = false; + } + + if (workspaceId) { + if (isNewWorkspace) { + console.log('New workspace 
created successfully:'); + } else { + console.log('Workspace found (duplicate name prevented, using existing):'); + } + console.log(` Workspace ID: ${workspaceId}`); + console.log(` Workspace Name: "${response.data.name || workspaceName}"`); + if (response.data.url) { + console.log(` URL: ${response.data.url}`); + } + if (response.data.path) { + console.log(` Path: ${response.data.path}`); + } + // Show all available fields for reference + console.log(' All returned fields:', Object.keys(response.data)); + if (Object.keys(response.data).length <= 5) { + console.log(' Full response:', JSON.stringify(response.data, null, 2)); + } + return workspaceId; // Return workspace ID for master script + } else { + console.log('Workspace creation failed - unexpected response format:'); + console.log(' Full response:', JSON.stringify(response.data, null, 2)); + return null; + } + } catch (error) { + if (error.response) { + const errorData = error.response.data; + if (errorData.code === 'conflict' || errorData.message?.includes('duplicate') || errorData.message?.includes('already exists')) { + console.log('Workspace creation failed - duplicate name:'); + console.log(` A workspace named "${workspaceName}" already exists.`); + console.log(' Set NO_DUPLICATES=false to allow duplicates, or choose a different WORKSPACE_NAME.'); + } else { + console.error('Error creating workspace:', errorData); + } + } else { + console.error('Network error creating workspace:', error.message); + } + } +} + +if (require.main === module) { + addNewWorkspace(); +} + +module.exports = addNewWorkspace; diff --git a/recipe-portal/recipes/members/create-workspace.md b/recipe-portal/recipes/members/create-workspace.md new file mode 100644 index 00000000..8f15ebbc --- /dev/null +++ b/recipe-portal/recipes/members/create-workspace.md @@ -0,0 +1,24 @@ +# Create Workspace + +## API Endpoints Used + +- `POST /v2/workspaces` ā [Create Workspace](https://help.sigmacomputing.com/reference/createworkspace) + +## 
Expected Output + +- Confirmation of new workspace creation +- Workspace details including workspaceId and name +- Owner assignment confirmation + +## Use Cases + +- Create dedicated workspaces for new projects or teams +- Organize content by department or use case +- Set up isolated environments for different user groups +- Establish workspace structure for new organizations + +## Important Notes + +- Workspace name is automatically generated using the MEMBERID +- Creating user becomes the initial workspace owner +- Workspace provides isolated content organization \ No newline at end of file diff --git a/recipe-portal/recipes/members/deactivate-existing.js b/recipe-portal/recipes/members/deactivate-existing.js new file mode 100644 index 00000000..11e87fb2 --- /dev/null +++ b/recipe-portal/recipes/members/deactivate-existing.js @@ -0,0 +1,37 @@ +// Title: Deactivate Existing Member +// Description: This script deactivates an existing member. + +require('dotenv').config({ path: 'recipes/.env' }); +const getBearerToken = require('../get-access-token'); +const axios = require('axios'); + +const baseURL = process.env.baseURL; +const memberId = process.env.MEMBERID; + +async function deleteMember() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + const requestURL = `${baseURL}/members/${memberId}`; + + try { + console.log(`URL sent to Sigma: ${requestURL}`); + + // Note: Axios delete method does not need to pass memberId as data in the body for this operation + const response = await axios.delete(requestURL, { + headers: { + 'Authorization': `Bearer ${accessToken}`, + // 'Content-Type': 'application/json' is not necessary for a delete operation without a body + } + }); + + console.log('Member deleted successfully:', JSON.stringify(response.data, null, 2)); + } catch (error) { + console.error('Error deleting member:', error.response ? 
error.response.data : error.message); + } +} + +deleteMember(); diff --git a/recipe-portal/recipes/members/deactivate-existing.md b/recipe-portal/recipes/members/deactivate-existing.md new file mode 100644 index 00000000..3242271d --- /dev/null +++ b/recipe-portal/recipes/members/deactivate-existing.md @@ -0,0 +1,24 @@ +# Deactivate Existing Member + +## API Endpoints Used + +- `DELETE /v2/members/{memberId}` ā [Deactivate Member](https://help.sigmacomputing.com/reference/deletemember) + +## Expected Output + +- Confirmation of member deactivation +- Final member status showing account as inactive +- Cleanup confirmation for associated permissions + +## Use Cases + +- Remove users who have left the organization +- Deactivate accounts for security compliance +- Clean up unused or test accounts +- Manage user lifecycle for offboarding + +## Important Notes + +- ā ļø This performs a soft delete (deactivation), not permanent removal +- Deactivated users cannot access Sigma but their data remains +- Action cannot be easily reversed - contact support for reactivation \ No newline at end of file diff --git a/recipe-portal/recipes/members/get-member-details.js b/recipe-portal/recipes/members/get-member-details.js new file mode 100644 index 00000000..2895e01f --- /dev/null +++ b/recipe-portal/recipes/members/get-member-details.js @@ -0,0 +1,58 @@ +// Title: Get Member Details +// Description: This script retrieves the details of a specific member based on MEMBERID. 
+ +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2: Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const memberId = process.env.MEMBERID; // Member ID to retrieve details for + +// Define an asynchronous function to get member details. +async function getMemberDetails() { + // Obtain a bearer token using the previously imported function. + const accessToken = await getBearerToken(); + // If unable to obtain a token, log an error message and exit the function. + if (!accessToken) { + console.log('Failed to obtain Bearer token.'); + return; + } + + // Validate MEMBERID is provided + if (!memberId) { + console.error('MEMBERID is required but not provided in the .env file.'); + return; + } + + const memberURL = `${baseURL}/members/${memberId}`; + console.log(`Fetching member details for MEMBERID: ${memberId}`); + + try { + // Make a GET request to the specified URL, including the bearer token in the request headers for authentication. + const response = await axios.get(memberURL, { + headers: { + 'Authorization': `Bearer ${accessToken}`, + 'Accept': 'application/json' + } + }); + + console.log(`URL sent to Sigma: ${memberURL}`); // Log the constructed URL before sending the request + + // Log the fetched member details to the console in a readable JSON format. + const memberDetails = response.data; + console.log("Member Details:", JSON.stringify(memberDetails, null, 2)); + + } catch (error) { + // If the request fails, log the error details. + console.error('Error retrieving member details:', error.response ? 
error.response.data : error.message); + } +} + +// Execute the function to get member details. +getMemberDetails(); \ No newline at end of file diff --git a/recipe-portal/recipes/members/get-member-details.md b/recipe-portal/recipes/members/get-member-details.md new file mode 100644 index 00000000..80883d89 --- /dev/null +++ b/recipe-portal/recipes/members/get-member-details.md @@ -0,0 +1,25 @@ +# Get Member Details + +## API Endpoints Used + +- `GET /v2/members?search={email}` ā [List Members](https://help.sigmacomputing.com/reference/listmembers) +- `GET /v2/members/{memberId}` ā [Get Member](https://help.sigmacomputing.com/reference/getmember) + +## Expected Output + +- Detailed member information including profile, permissions, and team memberships +- Account type, status, creation date, and last login information +- Associated team and workspace permissions + +## Use Cases + +- Look up specific user account details +- Audit individual user permissions +- Troubleshoot user access issues +- Verify user account configuration + +## Important Notes + +- Can search by either EMAIL or MEMBERID environment variable +- If using email, the @ character is automatically URL-encoded +- Returns complete user profile and permission details \ No newline at end of file diff --git a/recipe-portal/recipes/members/list-all.js b/recipe-portal/recipes/members/list-all.js new file mode 100644 index 00000000..841fe4ea --- /dev/null +++ b/recipe-portal/recipes/members/list-all.js @@ -0,0 +1,133 @@ +// Title: List All Members +// Description: This script lists all members in the organization with pagination support. 
//
// PREREQUISITES:
// - Valid authentication credentials with admin/member management permissions
// - Organization must have member data to retrieve
// - For large organizations: Results are paginated, use LIMIT and MAX_PAGES parameters to control output

// Load environment variables from a specific .env file for configuration
require('dotenv').config({ path: 'recipes/.env' });

// Import the function to obtain a bearer token from the authenticate-bearer module
const getBearerToken = require('../get-access-token');

// Import Axios for making HTTP requests
const axios = require('axios');
const fs = require('fs'); // NOTE(review): fs is never used below — presumably intended for file export; confirm before removing

// Load use-case specific variables from environment variables
const baseURL = process.env.baseURL; // Your base URL
const limit = parseInt(process.env.LIMIT, 10) || 100000; // Maximum members to retrieve (Default: 100,000 | Max: 1,000,000)
const maxPages = parseInt(process.env.MAX_PAGES, 10) || 0; // Maximum pages to fetch (0 = all pages)
const maxLimit = 1000000; // Hard cap applied regardless of LIMIT

/**
 * Fetch all organization members from the paginated GET /members endpoint,
 * print each member's details to the console, then print a summary.
 *
 * Pagination stops when: the API reports no more pages, the configured
 * member limit is reached, or MAX_PAGES pages have been fetched.
 */
async function listMembers() {
  try {
    // Clamp the requested limit to the API maximum.
    const actualLimit = Math.min(limit, maxLimit);

    console.log('Authenticating...');
    const token = await getBearerToken();
    if (!token) {
      console.log('ERROR: Authentication failed');
      return;
    }
    console.log('Authentication successful');

    console.log('Fetching members (limit: ' + actualLimit + ')...');

    let hasMore = true;
    let nextPage = null;
    let currentPage = 0;
    let allMembers = [];

    while (hasMore && (maxPages === 0 || currentPage < maxPages)) {
      try {
        currentPage++;

        // Use 1000 per page (API max) for efficiency, but respect overall actualLimit
        const perPageLimit = Math.min(1000, actualLimit - allMembers.length);
        let url = baseURL + '/members?limit=' + perPageLimit;
        if (nextPage) {
          url += '&page=' + nextPage;
        }

        console.log('Fetching page ' + currentPage + '...');

        const response = await axios.get(url, {
          headers: { Authorization: 'Bearer ' + token }
        });

        // Process current page members
        const entries = response.data.entries || [];
        console.log('Found ' + entries.length + ' members on page ' + currentPage);

        // Collect all members for output
        allMembers = allMembers.concat(entries);

        // Check if we've reached the specified limit
        if (allMembers.length >= actualLimit) {
          console.log('Reached specified limit of ' + actualLimit + ' members');
          allMembers = allMembers.slice(0, actualLimit);
          hasMore = false;
          break;
        }

        // Handle different possible response structures for pagination:
        // an explicit hasMore === false always wins; otherwise the presence
        // of a nextPage token means another page remains.
        const apiHasMore = response.data.hasMore;
        const apiNextPage = response.data.nextPage;
        if (apiHasMore === false) {
          hasMore = false;
        } else {
          hasMore = Boolean(apiNextPage);
        }

        nextPage = apiNextPage;

      } catch (error) {
        console.log('Error fetching members: ' + error.message);
        break;
      }
    }

    // BUGFIX: was '\\n=== MEMBER DETAILS ===' which printed a literal
    // backslash-n instead of a blank line before the header.
    console.log('\n=== MEMBER DETAILS ===');
    console.log('Found ' + allMembers.length + ' members total:');
    console.log('');

    // Display detailed information for each member in a clean format
    allMembers.forEach((member, index) => {
      // Build the display name from whichever name parts exist so a missing
      // firstName/lastName never renders as the string "undefined".
      const fullName = [member.firstName, member.lastName].filter(Boolean).join(' ');
      console.log('=== Member #' + (index + 1) + ' ===');
      console.log('Name: ' + (fullName || 'Unknown'));
      console.log('Email: ' + (member.email || 'No email'));
      console.log('Type: ' + (member.memberType || 'N/A'));
      console.log('Member ID: ' + (member.memberId || 'Unknown'));
      console.log('Created: ' + (member.createdAt ? new Date(member.createdAt).toLocaleDateString() : 'Unknown'));
      console.log('Updated: ' + (member.updatedAt ? new Date(member.updatedAt).toLocaleDateString() : 'Unknown'));
      console.log('');
    });

    console.log('=== SUMMARY ===');
    console.log('Total Members: ' + allMembers.length);
    console.log('Export completed successfully');

    // Brief delay before exit to allow UI to process the completion
    setTimeout(() => {
      process.exit(0);
    }, 1000);

  } catch (error) {
    console.log('FATAL ERROR: ' + error.message);
  }
}

// Execute the function to list members.
listMembers();
**Completion**: Stop when `hasMore` is false or no `nextPage` token

## Important Notes

- Uses Sigma's standard pagination pattern with `limit` and `page` parameters
- Page tokens are handled automatically by the script
- Maximum of 1,000 results per page enforced by Sigma API
- Includes both active and inactive members
- Member IDs are only accessible via API, not visible in Sigma UI
- Results displayed in clean console format for easy viewing

## ⚠️ Important Usage Notes

**This is a learning tool** designed to help you understand common Sigma API patterns. It is **not intended for production use**.

## Export Limitations & Recommendations

**Row Limits**:
- **Default**: 100,000 members (recommended for reliable downloads)
- **Maximum**: 1,000,000 members (Sigma API limit)
- **Portal Limit**: Single export run only - no batched/resumable export implemented

**Download Reliability**:
- Large downloads may time out due to network limitations
- Organizations with >100K members may experience reliability issues
- For production use, implement proper batching patterns

**Batching Not Implemented**: This portal demonstrates a single export run only.
Production applications should implement batch processing for large datasets using: +- Multiple requests with proper pagination handling +- Proper error handling and retry logic +- Progress tracking across multiple API calls + +**For Production Use**: +- Implement proper batching for datasets >100K members +- Add timeout handling and retry mechanisms +- Use proper pagination patterns for large organizations +- Consider data consistency implications during multi-request exports \ No newline at end of file diff --git a/recipe-portal/recipes/members/master-script.js b/recipe-portal/recipes/members/master-script.js new file mode 100644 index 00000000..8f8e9d89 --- /dev/null +++ b/recipe-portal/recipes/members/master-script.js @@ -0,0 +1,145 @@ +// Title: Master Script +// Description: Complete 5-step member onboarding workflow: 1) Create new member, 2) Create workspace, 3) Grant workspace permission, 4) Add to team, 5) Grant connection permission + +// Parameters (only these should show in UI): +// - EMAIL: Base email for new member (unique email will be auto-generated) +// - NEW_MEMBER_FIRST_NAME: First name for new member +// - NEW_MEMBER_LAST_NAME: Last name for new member +// - NEW_MEMBER_TYPE: Account type for new member (admin, build, view, analyze, act) +// - TEAMID: The team ID to add the new member to +// - CONNECTIONID: The connection ID to grant permissions for +// - WORKSPACE_NAME: Name for the new workspace (optional - defaults to "Workspace for {firstName}") +// - PERMISSION: Permission level for workspace (optional - defaults to 'view') +// +// Note: MEMBERID and WORKSPACEID are set automatically by this script - do not expose in UI + +// Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// Import necessary scripts for 5-step workflow +const createNewMember = require('./create-new'); +const addNewWorkspace = require('./create-workspace'); +const grantWorkspacePermission = 
require('./create-workspace-permission');
const addMemberToTeam = require('../teams/add-member-to-team');
const addNewConnectionPermission = require('./create-connection-permission');

// Define an asynchronous function to handle the onboarding process.
//
// Orchestration notes (order matters — do not reorder these steps):
// - Each child script reads its configuration from process.env AT REQUIRE TIME,
//   so this script mutates process.env and then deletes the module from
//   require.cache and re-requires it to pick up the new values.
// - MEMBERID and WORKSPACEID are produced by earlier steps and injected into
//   the environment for later steps; TEAMID is temporarily removed in Step 3
//   and restored afterwards.
async function onboardNewMember() {
  console.log('Starting the automated member onboarding workflow...');

  // Validate required environment variables
  const requiredVars = ['EMAIL', 'NEW_MEMBER_FIRST_NAME', 'NEW_MEMBER_LAST_NAME', 'NEW_MEMBER_TYPE', 'TEAMID', 'CONNECTIONID'];
  const missingVars = requiredVars.filter(varName => !process.env[varName]);

  if (missingVars.length > 0) {
    console.error('ā Missing required environment variables:', missingVars.join(', '));
    return;
  }

  // Echo the effective configuration so a failed run is easy to diagnose.
  console.log('Environment Variables:');
  console.log(' Base Email:', process.env.EMAIL);
  console.log(' First Name:', process.env.NEW_MEMBER_FIRST_NAME);
  console.log(' Last Name:', process.env.NEW_MEMBER_LAST_NAME);
  console.log(' Account Type:', process.env.NEW_MEMBER_TYPE);
  console.log(' Team ID:', process.env.TEAMID);
  console.log(' Connection ID:', process.env.CONNECTIONID);
  console.log(' Workspace Name:', process.env.WORKSPACE_NAME || `Workspace for ${process.env.NEW_MEMBER_FIRST_NAME}`);
  console.log(' Permission Level:', process.env.PERMISSION || 'view');
  console.log('');

  try {
    // Step 1: Create new member
    console.log('=== STEP 1: Creating new member ===');
    const memberInfo = await createNewMember();
    if (!memberInfo || !memberInfo.memberId) {
      throw new Error('Failed to create new member');
    }
    console.log(`Step 1 complete: Member created with ID ${memberInfo.memberId}`);
    console.log('');

    // Set MEMBERID for subsequent scripts
    process.env.MEMBERID = memberInfo.memberId;

    // Clear require cache for modules that read environment variables at load time
    // (they were already required at the top of this file, before MEMBERID existed).
    const memberScriptPaths = [
      require.resolve('./create-workspace-permission'),
      require.resolve('../teams/add-member-to-team'),
      require.resolve('./create-connection-permission')
    ];
    memberScriptPaths.forEach(scriptPath => {
      if (require.cache[scriptPath]) {
        delete require.cache[scriptPath];
      }
    });

    // Step 2: Create new workspace for the member
    console.log('=== STEP 2: Creating new workspace ===');
    const workspaceId = await addNewWorkspace();
    if (!workspaceId) {
      throw new Error('Failed to create workspace or get workspace ID');
    }
    console.log(`Step 2 complete: Workspace created with ID ${workspaceId}`);
    console.log('');

    // Step 3: Grant workspace permission to the new member
    console.log('=== STEP 3: Granting workspace permission ===');
    // Save the caller's values so they can be restored after this step.
    const originalWorkspaceId = process.env.WORKSPACEID;
    const originalTeamId = process.env.TEAMID;

    process.env.WORKSPACEID = workspaceId;
    // Temporarily unset TEAMID so the permission is granted to the member, not the team
    delete process.env.TEAMID;

    // Clear and re-require workspace permission module to pick up new WORKSPACEID and removed TEAMID
    const workspacePermissionPath = require.resolve('./create-workspace-permission');
    if (require.cache[workspacePermissionPath]) {
      delete require.cache[workspacePermissionPath];
    }
    const grantWorkspacePermissionFresh = require('./create-workspace-permission');
    await grantWorkspacePermissionFresh();

    // Restore original values
    if (originalWorkspaceId) {
      process.env.WORKSPACEID = originalWorkspaceId;
    } else {
      delete process.env.WORKSPACEID;
    }
    if (originalTeamId) {
      process.env.TEAMID = originalTeamId;
    }
    console.log('Step 3 complete: Workspace permissions granted');
    console.log('');

    // Step 4: Add member to team
    console.log('=== STEP 4: Adding member to team ===');
    // Clear and re-require team module to pick up fresh MEMBERID
    const teamPath = require.resolve('../teams/add-member-to-team');
    if (require.cache[teamPath]) {
      delete require.cache[teamPath];
    }
    const addMemberToTeamFresh = require('../teams/add-member-to-team');
    await addMemberToTeamFresh();
    console.log('Step 4 complete: Member added to team');
    console.log('');

    // Step 5: Grant connection permission to the member
    console.log('=== STEP 5: Granting connection permission ===');
    // Clear and re-require connection permission module to pick up fresh MEMBERID
    const connectionPath = require.resolve('./create-connection-permission');
    if (require.cache[connectionPath]) {
      delete require.cache[connectionPath];
    }
    const addNewConnectionPermissionFresh = require('./create-connection-permission');
    await addNewConnectionPermissionFresh();
    console.log('Step 5 complete: Connection permissions granted');
    console.log('');

    console.log('ONBOARDING COMPLETED SUCCESSFULLY');
    console.log(`New member ${memberInfo.email} (${memberInfo.memberId}) has been fully onboarded.`);
  } catch (error) {
    // Any failed step aborts the remaining steps; partial state (e.g. a created
    // member without team membership) is NOT rolled back.
    console.error('Onboarding failed:', error.message || error);
  }
}

// Execute the function to start the onboarding process
onboardNewMember();
// Title: Recent Workbooks
// Description: This script returns all the workbooks for a specified member, ordered by most recent.

// Load environment variables from a specific .env file for configuration
require('dotenv').config({ path: 'recipes/.env' });

// Helper that exchanges API credentials for a bearer token
const getBearerToken = require('../get-access-token');

// HTTP client used for the Sigma REST API call
const axios = require('axios');

// Use-case specific configuration pulled from the environment
const memberId = process.env.MEMBERID; // Member whose recent files are listed
const baseURL = process.env.baseURL;   // Your base URL

/**
 * List the documents and folders the member interacted with most recently.
 * Explorations (unsaved workbooks) are excluded, and results are sorted
 * newest-first by lastInteractionAt.
 */
async function listRecentDocuments() {
  // Authenticate first; without a token there is nothing useful to do.
  const accessToken = await getBearerToken();
  if (!accessToken) {
    console.log('Failed to obtain Bearer token.');
    return;
  }

  // Endpoint for the member's recent documents and folders.
  const recentsURL = `${baseURL}/members/${memberId}/files/recents`;
  console.log(`URL sent to Sigma: ${recentsURL}`);

  try {
    const response = await axios.get(recentsURL, {
      headers: {
        'Authorization': `Bearer ${accessToken}`,
        'Accept': 'application/json'
      }
    });

    // Drop explorations (not saved workbooks), project only the three fields
    // we report on, then order newest-first by last interaction time.
    const workbooks = response.data.entries
      .filter((entry) => entry.name !== 'Exploration')
      .map((entry) => ({
        name: entry.name,
        permission: entry.permission,
        lastInteractionAt: entry.lastInteractionAt
      }))
      .sort((first, second) =>
        new Date(second.lastInteractionAt) - new Date(first.lastInteractionAt));

    console.log("Recent workbooks (explorations excluded):", JSON.stringify(workbooks, null, 2));
  } catch (error) {
    // Distinguish API error payloads from transport-level failures.
    const details = error.response?.data;
    if (!details) {
      console.error('Network error listing recent workbooks:', error.message);
      return;
    }
    if (details.code === 'service_error') {
      console.error('Service error - this may occur for administrative users or system accounts:');
      console.error(` Error: ${details.message}`);
      console.error(` Request ID: ${details.requestId}`);
      console.error(' Note: Administrative users may not have recent workbook data available.');
    } else {
      console.error('Error listing recent workbooks:', details);
    }
  }
}

// Execute the function to list recent documents and folders.
listRecentDocuments();
// Title: Update Member
// Description: This script updates the account type for a member, based on the memberId defined in the .env file.

// Load environment variables from a specific .env file for configuration
require('dotenv').config({ path: 'recipes/.env' });

// Helper that exchanges API credentials for a bearer token
const getBearerToken = require('../get-access-token');

// HTTP client used for the Sigma REST API call
const axios = require('axios');

// Use-case specific configuration pulled from the environment
const newMemberType = process.env.NEW_MEMBER_TYPE; // Account type to assign
const memberId = process.env.MEMBERID;             // Member being updated
const baseURL = process.env.baseURL;               // Your base URL

/**
 * PATCH the member record so its account type becomes NEW_MEMBER_TYPE.
 * Logs the updated member on success, or the API error payload on failure.
 */
async function updateMemberAccountType() {
  // Authenticate first; bail out if no token could be obtained.
  const accessToken = await getBearerToken();
  if (!accessToken) {
    console.log('Failed to obtain Bearer token.');
    return;
  }

  // Target the specific member record.
  const requestURL = `${baseURL}/members/${memberId}`;
  console.log(`URL sent to Sigma: ${requestURL}`);

  // Request body and headers for the PATCH call.
  const payload = { memberType: newMemberType };
  const requestConfig = {
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${accessToken}`
    }
  };

  try {
    const response = await axios.patch(requestURL, payload, requestConfig);
    console.log('User account type updated successfully:', JSON.stringify(response.data, null, 2));
  } catch (error) {
    // Prefer the structured API error body when the server responded.
    console.error('Error updating member account type:', error.response ? error.response.data : error);
  }
}

// Execute the function to update a member's account type
updateMemberAccountType();
"internal" + }, + { + "organizationId": "019b4811-728c-4246-91bb-c9c0de9d5f76", + "memberId": "1A9vCmI48FYpdlUAtWGhULnp7yoGA", + "memberType": "View", + "firstName": "Build", + "lastName": "QuickStarts", + "email": "build.embed.qs@example.com", + "profileImgUrl": null, + "createdBy": "1A9vCmI48FYpdlUAtWGhULnp7yoGA", + "updatedBy": "yRn1UFV8ngVWBM1Hgrl51h7MS8uow", + "createdAt": "2025-06-27T18:17:01.925Z", + "updatedAt": "2025-08-19T16:07:01.070Z", + "homeFolderId": "0e7d7c9b-ab28-45b9-86f3-e2efdac35650", + "userKind": "embed" + }, + { + "organizationId": "019b4811-728c-4246-91bb-c9c0de9d5f76", + "memberId": "4KlLw3hqxMnWY9WJ98uRp5ph0MSWk", + "memberType": "View", + "firstName": "Phil", + "lastName": "Test10", + "email": "phil+test10+08201156@sigmacomputing.com", + "profileImgUrl": null, + "createdBy": "4KlLw3hqxMnWY9WJ98uRp5ph0MSWk", + "updatedBy": "yRn1UFV8ngVWBM1Hgrl51h7MS8uow", + "createdAt": "2025-08-20T15:56:02.101Z", + "updatedAt": "2025-08-20T19:11:45.747Z", + "homeFolderId": "4460070c-6f87-4a18-a183-8310d1fe9f4f", + "userKind": "internal" + }, + { + "organizationId": "019b4811-728c-4246-91bb-c9c0de9d5f76", + "memberId": "7irG81aop1t7A07nsMMnJHcnsCyP4", + "memberType": "build", + "firstName": "QuickStarts", + "lastName": "Build", + "email": "phil+build@sigmacomputing.com", + "profileImgUrl": null, + "createdBy": "7irG81aop1t7A07nsMMnJHcnsCyP4", + "updatedBy": "7irG81aop1t7A07nsMMnJHcnsCyP4", + "createdAt": "2025-03-03T21:45:43.007Z", + "updatedAt": "2025-04-12T21:14:34.228Z", + "homeFolderId": "55c9c2c9-3a5f-4207-aa6e-a6dd270f4254", + "userKind": "internal" + }, + { + "organizationId": "019b4811-728c-4246-91bb-c9c0de9d5f76", + "memberId": "SigmaSchedulerRobot", + "memberType": "admin", + "firstName": "Scheduler", + "lastName": "User", + "email": "scheduler-robot@sigmacomputing.com", + "profileImgUrl": null, + "createdBy": "SigmaSchedulerRobot", + "updatedBy": "SigmaSchedulerRobot", + "createdAt": "2025-03-03T15:31:34.593Z", + "updatedAt": 
"2025-03-03T15:31:34.593Z", + "homeFolderId": "cced88eb-4d6a-433f-8ebb-1a4e9d65413a", + "userKind": "internal" + }, + { + "organizationId": "019b4811-728c-4246-91bb-c9c0de9d5f76", + "memberId": "TQHoVSKtra4BMo8wZxx4hrxpxk5Tw", + "memberType": "admin", + "firstName": "Vandit", + "lastName": "Patel", + "email": "vandit@sigmacomputing.com", + "profileImgUrl": null, + "createdBy": "TQHoVSKtra4BMo8wZxx4hrxpxk5Tw", + "updatedBy": "TQHoVSKtra4BMo8wZxx4hrxpxk5Tw", + "createdAt": "2025-07-25T18:32:33.955Z", + "updatedAt": "2025-07-25T18:32:33.955Z", + "homeFolderId": "62daf41f-48b3-4890-9b9c-2960fc94cb9a", + "userKind": "internal" + } +] \ No newline at end of file diff --git a/recipe-portal/recipes/package-lock.json b/recipe-portal/recipes/package-lock.json new file mode 100644 index 00000000..6ad2ba8f --- /dev/null +++ b/recipe-portal/recipes/package-lock.json @@ -0,0 +1,304 @@ +{ + "name": "sigma-api-recipes", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "sigma-api-recipes", + "version": "1.0.0", + "dependencies": { + "axios": "^0.30.0", + "dotenv": "^10.0.0" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "0.30.1", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.30.1.tgz", + "integrity": "sha512-2XabsR1u0/B6OoKy57/xJmPkQiUvdoV93oW4ww+Xjee7C2er/O5U77lvqycDkT2VQDtfjYcjw8ZV8GDaoqwjHQ==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.4", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": 
"sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dotenv": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-10.0.0.tgz", + "integrity": "sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=10" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + 
"node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + 
"es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + 
"dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + } + } +} diff --git a/recipe-portal/recipes/package.json b/recipe-portal/recipes/package.json new file mode 100644 index 00000000..b4c58544 --- /dev/null +++ b/recipe-portal/recipes/package.json @@ -0,0 +1,10 @@ +{ + "name": "sigma-api-recipes", + "version": "1.0.0", + "description": "Sample API Scripts for Sigma's REST API", + "dependencies": { + "axios": "^0.30.0", + "dotenv": "^10.0.0" + } + } + \ No newline at end of file diff --git a/recipe-portal/recipes/teams/add-member-to-team.js b/recipe-portal/recipes/teams/add-member-to-team.js new file mode 100644 index 00000000..20c1b09c --- /dev/null +++ b/recipe-portal/recipes/teams/add-member-to-team.js @@ -0,0 +1,44 @@ +// Title: Add Member to Team +// Description: This script adds a member to a team. 
+ +require('dotenv').config({ path: 'recipes/.env' }); + +const getBearerToken = require('../get-access-token'); +const axios = require('axios'); + +const baseURL = process.env.baseURL; // Your base URL +const memberId = process.env.MEMBERID; // The unique identifier of the member you're adding to the team +const teamId = process.env.TEAMID; // The unique identifier of the team + +async function addMemberToTeam() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + const requestURL = `${baseURL}/teams/${teamId}/members`; + console.log(`URL sent to Sigma: ${requestURL}`); + + try { + // Adjusting for the PATCH request and format based on the curl example + const response = await axios.patch(requestURL, { + add: [memberId] // The memberId should be in an array + }, { + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${accessToken}` + } + }); + + console.log('Member successfully added to team:', JSON.stringify(response.data, null, 2)); + } catch (error) { + console.error('Error adding member to team:', error.response ? 
JSON.stringify(error.response.data, null, 2) : error.message); + } +} + +if (require.main === module) { + addMemberToTeam(); +} + +module.exports = addMemberToTeam; diff --git a/recipe-portal/recipes/teams/add-member-to-team.md b/recipe-portal/recipes/teams/add-member-to-team.md new file mode 100644 index 00000000..3880ee29 --- /dev/null +++ b/recipe-portal/recipes/teams/add-member-to-team.md @@ -0,0 +1,24 @@ +# Add Member to Team + +## API Endpoints Used + +- `PATCH /v2/teams/{teamId}/members` ā [Update Team Members](https://help.sigmacomputing.com/reference/updateteammembers) + +## Expected Output + +- Confirmation of successful team member addition +- Updated team membership details +- Member and team information summary + +## Use Cases + +- Assign new employees to appropriate teams +- Move users between teams during reorganization +- Grant team-based access to specific projects +- Manage team-based permissions and workflows + +## Important Notes + +- Requires valid MEMBERID and TEAMID in environment variables +- Member must be an active user in the organization +- Team membership affects access to team-specific resources \ No newline at end of file diff --git a/recipe-portal/recipes/teams/bulk-assign-team.js b/recipe-portal/recipes/teams/bulk-assign-team.js new file mode 100644 index 00000000..e3bff061 --- /dev/null +++ b/recipe-portal/recipes/teams/bulk-assign-team.js @@ -0,0 +1,77 @@ +// Title: Bulk Assign Team +// Description: This script bulk assigns existing members to a team, based on the member's email, which is matched from the member-emails file. + +// Load required modules and environment variables for script configuration. +require('dotenv').config({ path: 'recipes/.env' }); +const getBearerToken = require('../get-access-token'); // Function to obtain an authentication token. +const axios = require('axios'); // HTTP client for making requests to Sigma's API. +const fs = require('fs'); // File system module to read the list of emails from a file. 
+const path = require('path'); // Module for handling file paths. + +// Environment variables loaded from the .env file. +const baseURL = process.env.baseURL; // Base URL for Sigma's API endpoints. +const teamId = process.env.TEAMID; // Target team ID for assigning members. +const emailListPath = path.join(__dirname, '..', '.member-emails'); // Path to the file containing member emails. + +// Function to find a member's ID by their email address. +async function findMemberIdByEmail(email, token) { + const encodedEmail = encodeURIComponent(email); // Encode email to ensure it is URL-safe. + const requestUrl = `${baseURL}/members?search=${encodedEmail}`; // Construct the request URL. + console.log(`Searching for member by email with URL: ${requestUrl}`); // Log the request for debugging. + try { + const response = await axios.get(requestUrl, { + headers: { Authorization: `Bearer ${token}` }, // Include the bearer token for authentication. + }); + // Return the first matching member ID if found. + const members = response.data.entries || []; + const matchingMember = members.find(member => member.email.toLowerCase() === email.toLowerCase()); + return matchingMember ? matchingMember.memberId : null; + } catch (error) { + console.error(`Error searching for member by email (${email}):`, error); + return null; // Return null if an error occurs or no member is found. + } +} + +// Function to add/remove a member to a specified team by their member ID. +async function addMemberToTeam(memberId, teamId, token) { + const requestUrl = `${baseURL}/teams/${teamId}/members`; // API endpoint for adding a member to a team. + const payload = { add: [memberId], remove: [] }; // Payload specifying the member to add (and none to remove). + const headers = { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' }; // Request headers. + + // Log the request details for debugging. 
+ console.log(`Adding member to team with URL: ${requestUrl}`); + // console.log(`Headers:`, JSON.stringify(headers, null, 2)); + console.log(`Payload:`, JSON.stringify(payload, null, 2)); + + try { + const response = await axios.patch(requestUrl, payload, { headers }); + console.log(`Member ${memberId} added to team ${teamId}. Response:`, response.data); + } catch (error) { + console.error(`Error adding member ${memberId} to team ${teamId}:`, error.response ? error.response.data : error); + } +} + +// Main function to process member emails and assign them to the specified team. +async function processMembers(teamId, token) { + const emails = fs.readFileSync(emailListPath, 'utf-8').split(','); // Read and split the list of emails. + for (const email of emails) { + const memberId = await findMemberIdByEmail(email.trim(), token); // Get member ID by email. + if (memberId) { + await addMemberToTeam(memberId, teamId, token); // Add the member to the team. + } else { + console.log(`Member not found for email: ${email}`); // Log if no member ID is found for an email. + } + } +} + +// Entry point of the script. +async function main() { + const token = await getBearerToken(); // Fetch the bearer token. + if (!token) { + console.error('Failed to obtain bearer token.'); + return; + } + await processMembers(teamId, token); // Process member assignments. +} + +main().catch(console.error); // Execute the main function and catch any errors. diff --git a/recipe-portal/recipes/teams/bulk-assign-team.md b/recipe-portal/recipes/teams/bulk-assign-team.md new file mode 100644 index 00000000..b88a85ac --- /dev/null +++ b/recipe-portal/recipes/teams/bulk-assign-team.md @@ -0,0 +1,34 @@ +# Bulk Assign Team + +## API Endpoints Used + +- `GET /v2/members?search={email}` ā [List Members](https://help.sigmacomputing.com/reference/listmembers) +- `PATCH /v2/teams/{teamId}/members` ā [Update Team Members](https://help.sigmacomputing.com/reference/updateteammembers) + +## Required Setup + +1. 
Create email list file: Create a file named `.member-emails` in the `/recipes` directory +2. Add email addresses: Separate each email address with a comma (the script splits the file contents on commas) +3. Example file content: + ``` + user1@company.com, + user2@company.com, + admin@company.com + ``` + +## Expected Output + +- Console log showing search and assignment results for each email +- Success confirmation for members found and assigned to the team +- Error messages for emails that don't match existing Sigma users + +## Use Cases + +- Onboard multiple new team members at once +- Reassign groups of users to different teams during reorganization +- Bulk team assignments from external user lists (HR systems, etc.) + +## Important Notes + +- ⚠️ Email matching: Script will skip emails that don't match existing Sigma users +- Performance: Bulk operations may take time with large email lists due to individual API calls \ No newline at end of file diff --git a/recipe-portal/recipes/teams/bulk-remove-team.js b/recipe-portal/recipes/teams/bulk-remove-team.js new file mode 100644 index 00000000..96278f73 --- /dev/null +++ b/recipe-portal/recipes/teams/bulk-remove-team.js @@ -0,0 +1,77 @@ +// Title: Bulk Remove Team +// Description: This script bulk removes existing members from a team, based on the member's email, which is matched from the member-emails file. + +// Load required modules and environment variables for script configuration. +require('dotenv').config({ path: 'recipes/.env' }); +const getBearerToken = require('../get-access-token'); // Function to obtain an authentication token. +const axios = require('axios'); // HTTP client for making requests to Sigma's API. +const fs = require('fs'); // File system module to read the list of emails from a file. +const path = require('path'); // Module for handling file paths. + +// Environment variables loaded from the .env file. +const baseURL = process.env.baseURL; // Base URL for Sigma's API endpoints.
+const teamId = process.env.TEAMID; // Target team ID for removing members. +const emailListPath = path.join(__dirname, '..', '.member-emails'); // Path to the file containing member emails. + +// Function to find a member's ID by their email address. +async function findMemberIdByEmail(email, token) { + const encodedEmail = encodeURIComponent(email); // Encode email to ensure it is URL-safe. + const requestUrl = `${baseURL}/members?search=${encodedEmail}`; // Construct the request URL. + console.log(`Searching for member by email with URL: ${requestUrl}`); // Log the request for debugging. + try { + const response = await axios.get(requestUrl, { + headers: { Authorization: `Bearer ${token}` }, // Include the bearer token for authentication. + }); + // Return the first matching member ID if found. + const members = response.data.entries || []; + const matchingMember = members.find(member => member.email.toLowerCase() === email.toLowerCase()); + return matchingMember ? matchingMember.memberId : null; + } catch (error) { + console.error(`Error searching for member by email (${email}):`, error); + return null; // Return null if an error occurs or no member is found. + } +} + +// Function to remove a member from a specified team by their member ID. + // NOTE: name kept as addMemberToTeam for symmetry with bulk-assign-team.js, but the payload removes the member + async function addMemberToTeam(memberId, teamId, token) { + const requestUrl = `${baseURL}/teams/${teamId}/members`; // API endpoint for updating team membership. + + // Remove-only membership update: + const payload = { add: [], remove: [memberId] }; // Payload specifying the member to remove (and none to add). + const headers = { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' }; // Request headers. + + // Log the request details for debugging.
+ + console.log(`Removed member to team with URL: ${requestUrl}`); + // console.log(`Headers:`, JSON.stringify(headers, null, 2)); + console.log(`Payload:`, JSON.stringify(payload, null, 2)); + + try { + const response = await axios.patch(requestUrl, payload, { headers }); + console.log(`Member ${memberId} removed from team ${teamId}. Response:`, response.data); + } catch (error) { + console.error(`Error removing member ${memberId} to team ${teamId}:`, error.response ? error.response.data : error); + } + } + +// Main function to process member emails and remove them from the specified team. +async function processMembers(teamId, token) { + const emails = fs.readFileSync(emailListPath, 'utf-8').split(','); // Read and split the list of emails. + for (const email of emails) { + const memberId = await findMemberIdByEmail(email.trim(), token); // Get member ID by email. + if (memberId) { + await addMemberToTeam(memberId, teamId, token); // Remove the member from the team. + } else { + console.log(`Member not found for email: ${email}`); // Log if no member ID is found for an email. + } + } +} + +// Entry point of the script. +async function main() { + const token = await getBearerToken(); // Fetch the bearer token. + if (!token) { + console.error('Failed to obtain bearer token.'); + return; + } + await processMembers(teamId, token); // Process member removals. +} + +main().catch(console.error); // Execute the main function and catch any errors.
diff --git a/recipe-portal/recipes/teams/bulk-remove-team.md b/recipe-portal/recipes/teams/bulk-remove-team.md new file mode 100644 index 00000000..d0fdb2b8 --- /dev/null +++ b/recipe-portal/recipes/teams/bulk-remove-team.md @@ -0,0 +1,34 @@ +# Bulk Remove Team + +## API Endpoints Used + +- `GET /v2/members?search={email}` → [List Members](https://help.sigmacomputing.com/reference/listmembers) +- `PATCH /v2/teams/{teamId}/members` → [Update Team Members](https://help.sigmacomputing.com/reference/updateteammembers) + +## Required Setup + +1. Create email list file: Create a file named `.member-emails` in the `/recipes` directory +2. Add email addresses: Separate each email address with a comma (the script splits the file contents on commas) +3. Example file content: + ``` + user1@company.com, + user2@company.com, + admin@company.com + ``` + +## Expected Output + +- Console log showing search and removal results for each email +- Success confirmation for members found and removed from the team +- Error messages for emails that don't match existing Sigma users + +## Use Cases + +- Remove multiple team members during team restructuring +- Bulk removal of users who changed departments +- Clean up team membership from external user lists + +## Important Notes + +- ⚠️ Email matching: Script will skip emails that don't match existing Sigma users +- Performance: Bulk operations may take time with large email lists due to individual API calls \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/PlugsSalesPerformanceDashboard.pdf.temp b/recipe-portal/recipes/workbooks/PlugsSalesPerformanceDashboard.pdf.temp new file mode 100644 index 00000000..7b49b05d Binary files /dev/null and b/recipe-portal/recipes/workbooks/PlugsSalesPerformanceDashboard.pdf.temp differ diff --git a/recipe-portal/recipes/workbooks/all-input-tables.js b/recipe-portal/recipes/workbooks/all-input-tables.js new file mode 100644 index 00000000..133ac0ef --- /dev/null +++ b/recipe-portal/recipes/workbooks/all-input-tables.js @@
-0,0 +1,84 @@ +// Title: All Input Tables +// Description: This script lists all instances of input-tables, across all workbooks, where they exist. +// Utilizes Sigma's API as documented in their Swagger documentation. + +// Load necessary dependencies and set up the environment +require('dotenv').config({ path: 'recipes/.env' }); // Load environment variables for configuration +const getBearerToken = require('../get-access-token'); // Import function to obtain a bearer token for API authentication +const axios = require('axios'); // Import Axios for making HTTP requests + +const baseURL = process.env.baseURL; // Load the base URL for API requests from environment variables + +// Handle any unhandled promise rejections to prevent the script from failing silently +process.on('unhandledRejection', error => { + console.error('Unhandled promise rejection:', error); +}); + +// Function to fetch and log details of 'input-table' elements within a specific workbook +async function fetchElementsOfWorkbook(workbook, accessToken) { + try { + // Fetch all pages within the workbook + const pagesResponse = await axios.get(`${baseURL}/workbooks/${workbook.workbookId}/pages`, { + headers: { 'Authorization': `Bearer ${accessToken}` }, // Include the authorization header + }); + + // Iterate through each page in the workbook + for (const page of pagesResponse.data.entries) { + const elementsUrl = `${baseURL}/workbooks/${workbook.workbookId}/pages/${page.pageId}/elements`; // Construct the URL to fetch elements + try { + // Fetch elements for the current page + const elementsResponse = await axios.get(elementsUrl, { + headers: { 'Authorization': `Bearer ${accessToken}` }, // Include the authorization header + }); + + // Filter for elements of type 'input-table' + const inputTableElements = elementsResponse.data.entries.filter(element => element.type === 'input-table'); + if (inputTableElements.length > 0) { + // Log workbook and page details if 'input-table' elements are found + 
console.log(`Workbook: "${workbook.name}", Path: "${workbook.path}/${workbook.workbookId}", Page: "${page.name}"`); + inputTableElements.forEach(element => { + // Log details for each 'input-table' element + console.log(` - Input Table: ${element.name}, Element ID: ${element.elementId}, Latest Version: ${workbook.latestVersion}`); + }); + } + } catch (error) { + console.error(`Error fetching elements for page "${page.name}" in workbook "${workbook.name}":`, error.response ? error.response.data : error.message); + } + } + } catch (error) { + console.error(`Error fetching pages for workbook "${workbook.name}":`, error.response ? error.response.data : error.message); + } +} + +// Main function to list workbooks and find 'input-table' elements within them +async function listWorkbooksAndFindInputTables() { + console.log('Starting to search for input-table elements across all workbooks...'); + const accessToken = await getBearerToken(); // Obtain the bearer token for API authentication + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); // Log an error if the token cannot be obtained + return; + } + + try { + // Fetch all workbooks accessible to the token + const workbooksResponse = await axios.get(`${baseURL}/workbooks`, { + headers: { 'Authorization': `Bearer ${accessToken}` }, // Include the authorization header + }); + + // Iterate through each workbook to search for 'input-table' elements + for (const workbook of workbooksResponse.data.entries) { + await fetchElementsOfWorkbook(workbook, accessToken); // Process each workbook + } + } catch (error) { + console.error('Error fetching workbooks:', error.response ? 
error.response.data : error.message); + } + console.log('Completed searching for input-table elements.'); // Indicate completion of the script +} + +// Execute the main function if the script is run directly +if (require.main === module) { + listWorkbooksAndFindInputTables(); +} + +// Export the main function for use in other modules +module.exports = listWorkbooksAndFindInputTables; \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/all-input-tables.md b/recipe-portal/recipes/workbooks/all-input-tables.md new file mode 100644 index 00000000..63697f3e --- /dev/null +++ b/recipe-portal/recipes/workbooks/all-input-tables.md @@ -0,0 +1,26 @@ +# All Input Tables + +## API Endpoints Used + +- `GET /v2/workbooks` ā [List Workbooks](https://help.sigmacomputing.com/reference/listworkbooks) +- `GET /v2/workbooks/{workbookId}` ā [Get Workbook](https://help.sigmacomputing.com/reference/getworkbook) + +## Expected Output + +- Complete inventory of all input tables across all workbooks +- Location information showing which workbook contains each input table +- Input table configuration details and data source information + +## Use Cases + +- Audit all input tables in the organization +- Track data input dependencies across workbooks +- Monitor input table usage and configuration +- Generate input table inventory reports + +## Important Notes + +- Scans all workbooks to find input table instances +- Shows input table locations and configurations +- Useful for data governance and dependency mapping +- Process time depends on total number of workbooks \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/copy-workbook-folder.js b/recipe-portal/recipes/workbooks/copy-workbook-folder.js new file mode 100644 index 00000000..2ad3d877 --- /dev/null +++ b/recipe-portal/recipes/workbooks/copy-workbook-folder.js @@ -0,0 +1,104 @@ +// Title: Copy Workbook Folder +// Description: This script copies a workbook folder from one location to another. 
+ +// Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Base URL for the Sigma API +const workbookId = process.env.WORKBOOK_ID; // Workbook ID to copy +const memberId = process.env.MEMBERID; // ID of the member whose "My Documents" folder we'll use +const newWorkbookName = process.env.NEW_WORKBOOK_NAME || 'Copy of Workbook'; // Name for the copied workbook +const newWorkbookDescription = process.env.NEW_WORKBOOK_DESCRIPTION || 'Copied workbook'; // Description for the copied workbook + +// Function to retrieve the ID of the member's "My Documents" folder +async function getMyDocumentsFolderId(accessToken) { + try { + // Send the request to retrieve the member's details + const response = await axios.get( + `${baseURL}/members/${memberId}`, + { headers: { Authorization: `Bearer ${accessToken}` } } + ); + + // Extract the ID of the member's "My Documents" folder + const homeFolderId = response.data.homeFolderId; + + // Log the retrieved folder ID + console.log('Retrieved "My Documents" folder ID:', homeFolderId); + + return homeFolderId; + } catch (error) { + // Log any errors that occur during the process + console.error('Failed to retrieve "My Documents" folder ID:', error.response ? 
error.response.data : error); + return null; + } +} + +// Function to copy the workbook to the specified folder +async function copyWorkbook(accessToken, destinationFolderId) { + try { + // Define the request payload to copy the workbook + const copyPayload = { + name: newWorkbookName, // Name from environment variable or default + description: newWorkbookDescription, // Description from environment variable or default + ownerId: memberId, // Specify the ID of the user who will own the copied workbook + destinationFolderId: destinationFolderId // Use the passed destinationFolderId argument + }; + + console.log(`Copying workbook to: "${newWorkbookName}"`); + + // Send the request to copy the workbook + const response = await axios.post( + `${baseURL}/workbooks/${workbookId}/copy`, + copyPayload, + { headers: { Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/json' } } + ); + + // Log the success message and copied workbook details + console.log('Workbook copy initiated successfully.'); + console.log('Copied workbook details:', response.data); + + // Return the ID of the copied workbook for further processing if needed + return response.data.workbookId; + } catch (error) { + // Log any errors that occur during the process + console.error('Failed to copy workbook:', error.response ? 
error.response.data : error); + return null; + } +} + +// Main function to execute the workflow +async function main() { + // Obtain the bearer token for authentication + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain bearer token.'); + return; + } + + // Retrieve the ID of the member's "My Documents" folder + const myDocumentsFolderId = await getMyDocumentsFolderId(accessToken); + if (!myDocumentsFolderId) { + console.error('Failed to retrieve "My Documents" folder ID.'); + return; + } + + // Copy the workbook and place it in the "My Documents" folder + const copiedWorkbookId = await copyWorkbook(accessToken, myDocumentsFolderId); // Pass myDocumentsFolderId here + if (!copiedWorkbookId) { + console.error('Failed to copy workbook.'); + return; + } + + // Perform any additional actions with the copied workbook if needed + console.log(`Workbook successfully copied with ID: ${copiedWorkbookId}`); +} + +// Execute the main function +main(); \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/copy-workbook-folder.md b/recipe-portal/recipes/workbooks/copy-workbook-folder.md new file mode 100644 index 00000000..aea1fb28 --- /dev/null +++ b/recipe-portal/recipes/workbooks/copy-workbook-folder.md @@ -0,0 +1,26 @@ +# Copy Workbook Folder + +## API Endpoints Used + +- `POST /v2/workbooks/{workbookId}/copy` ā [Copy Workbook](https://help.sigmacomputing.com/reference/copyworkbook) +- `GET /v2/members/{memberId}` ā [Get Member Details](https://help.sigmacomputing.com/reference/getmember) + +## Expected Output + +- Confirmation of successful workbook folder copy operation +- Details of source and destination folder locations +- New folder ID and path information + +## Use Cases + +- Duplicate workbook folders for different environments +- Create backup copies of important workbook collections +- Set up template folders for new projects or teams +- Organize content across different workspace structures + 
+## Important Notes + +- Copies entire folder structure including all contained workbooks +- Requires valid WORKBOOK_ID (folder) and MEMBERID (destination owner) +- Preserves folder hierarchy and workbook relationships +- New owner gets full control over copied content \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/export-workbook-element-csv.js b/recipe-portal/recipes/workbooks/export-workbook-element-csv.js new file mode 100644 index 00000000..fc0b7fc1 --- /dev/null +++ b/recipe-portal/recipes/workbooks/export-workbook-element-csv.js @@ -0,0 +1,291 @@ +// Title: Export Workbook Element CSV +// Description: This script triggers an export to CSV job with date range parameters, and downloads the export once ready. +// +// PREREQUISITES: +// - Workbook must contain the target element (table/chart) to export +// - For date filtering: Workbook must have a page control with ID "API-Date-Filter" +// (Control ID can be found in workbook edit mode > select control > Properties panel) +// - Element must be compatible with CSV export format (tables work best) + +console.log('CSV Export script loaded'); + +// Environment variables are provided dynamically by the UI - no .env file needed + +// Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load necessary modules for file handling +const fs = require('fs'); +const path = require('path'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Base URL for the Sigma API +const workbookId = process.env.WORKBOOK_ID; // Workbook ID from which to export data +const elementId = process.env.ELEMENT_ID; // Element ID within the workbook to target for export +const startDate = process.env.START_DATE; // Start date for export range (YYYY-MM-DD format) +const endDate = process.env.END_DATE; // 
End date for export range (YYYY-MM-DD format) +const rowLimit = process.env.LIMIT ? parseInt(process.env.LIMIT) : 100000; // Row limit for export (default: 100K) +const exportFilename = process.env.EXPORT_FILENAME || 'export.csv'; // Custom filename for exported file + +// Validate row limit +if (rowLimit > 1000000) { + console.log('ERROR: Row limit exceeds maximum allowed (1,000,000)'); + process.exit(1); +} + +console.log('Configuration:'); +console.log(' Workbook ID: ' + workbookId); +console.log(' Element ID: ' + elementId); +console.log(' Date range: ' + (startDate && endDate ? startDate + ' to ' + endDate : 'All data')); +console.log(' Row limit: ' + rowLimit.toLocaleString() + ' rows'); +console.log(' Output filename: ' + exportFilename); + +async function initiateExport(accessToken) { + // Prepare the options for the export request with correct format and filters + const exportOptions = { + elementId: elementId, + format: { type: 'csv' }, // Define the export format + runAsynchronously: true // Request the export to run asynchronously + }; + + // Add date range parameters if provided + if (startDate && endDate) { + exportOptions.parameters = { + "DateFilter": `min:${startDate},max:${endDate}` + }; + console.log('Date range applied: ' + startDate + ' to ' + endDate); + } else if (startDate || endDate) { + console.warn('Both START_DATE and END_DATE required for date filtering - skipping filter'); + } + + try { + const response = await axios.post( + `${baseURL}/workbooks/${workbookId}/export`, + exportOptions, + { headers: { Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/json' } } + ); + const queryId = response.data.queryId; + console.log('Export initiated - QueryID: ' + queryId); + return queryId; + } catch (error) { + console.error('Failed to initiate export:', error); + return null; + } +} + +async function checkExportReady(queryId, accessToken) { + // Continuously check if the export is ready for download + console.log('Checking 
export readiness for queryId: ' + queryId); + while (true) { + try { + const downloadUrl = baseURL + '/query/' + queryId + '/download'; + console.log('Download URL: ' + downloadUrl); + + const response = await axios.get(downloadUrl, { + headers: { Authorization: 'Bearer ' + accessToken }, + responseType: 'stream' + }); + + if (response.status === 200) { // Check if the export is ready + console.log('Export is ready for download.'); + return response.data; + } else { + console.log('Received unexpected status code: ' + response.status + ' (expected 200 or 204)'); + await new Promise(resolve => setTimeout(resolve, 3000)); // Wait before retry + } + } catch (error) { + if (error.response && error.response.status === 204) { + // Export not ready yet, wait before retrying + console.log('Export is not ready yet. Waiting to retry...'); + await new Promise(resolve => setTimeout(resolve, 10000)); + } else { + console.error('Failed to check export status:', error); + return null; + } + } + } +} + +async function downloadExport(data, filename) { + // Handle the download of the export file + const filePath = path.join(__dirname, filename); + const writer = fs.createWriteStream(filePath); + + return new Promise((resolve, reject) => { + data.pipe(writer); + let error = null; + + writer.on('error', err => { + error = err; + writer.close(); + reject(err); + }); + + writer.on('finish', () => { + if (!error) { + console.log('Export downloaded successfully to: ' + filePath); + resolve(true); + } + }); + }); +} + +async function exportWorkflow() { + try { + console.log('Authenticating...'); + const accessToken = await getBearerToken(); + if (!accessToken) { + console.log('ERROR: Authentication failed'); + return; + } + console.log('Initiating CSV export...'); + const exportOptions = { + elementId: elementId, + format: { type: 'csv' }, + runAsynchronously: true + }; + + // Add row limit (always included now) + exportOptions.rowLimit = rowLimit; + + // Add date range parameters if 
provided (requires workbook control with ID: API-Date-Filter) + if (startDate && endDate) { + exportOptions.parameters = { + "API-Date-Filter": "min:" + startDate + ",max:" + endDate + }; + } + + console.log('Final export options being sent to API:'); + console.log(JSON.stringify(exportOptions, null, 2)); + + const response = await axios.post( + baseURL + '/workbooks/' + workbookId + '/export', + exportOptions, + { headers: { Authorization: 'Bearer ' + accessToken, 'Content-Type': 'application/json' } } + ); + + const queryId = response.data.queryId; + if (!queryId) { + console.log('ERROR: No queryId received from export API'); + return; + } + + console.log('Export initiated - QueryID: ' + queryId); + + console.log('Waiting for export to complete...'); + const downloadUrl = baseURL + '/query/' + queryId + '/download'; + + let attempts = 0; + let lastStatus = null; + const loadingChars = ['|', '/', '-', '\\']; + + while (attempts < 20) { + attempts++; + + try { + const statusResponse = await axios.get(downloadUrl, { + headers: { Authorization: 'Bearer ' + accessToken }, + responseType: 'stream' + }); + + if (statusResponse.status === 200) { + console.log('Export ready! 
Downloading...'); + + // Capture the CSV data as chunks + let csvChunks = []; + let chunkCount = 0; + + statusResponse.data.on('data', (chunk) => { + chunkCount++; + csvChunks.push(chunk); + if (chunkCount % 50 === 0) { + console.log('Received chunk #' + chunkCount + ', total bytes so far: ' + Buffer.concat(csvChunks).length); + } + }); + + statusResponse.data.on('end', () => { + // Combine all chunks into a single buffer + const csvBuffer = Buffer.concat(csvChunks); + const sizeKB = Math.round(csvBuffer.length / 1024); + const csvText = csvBuffer.toString('utf8'); + const lineCount = csvText.split('\n').filter(line => line.trim()).length - 1; // -1 for header + console.log('CSV export completed (' + sizeKB + 'KB, ' + chunkCount + ' chunks, ~' + lineCount + ' rows)'); + + // Save file locally to project folder for verification + const projectSaveFilename = exportFilename || 'export.csv'; + const projectSavePath = path.join(__dirname, '../../', 'downloaded-files', projectSaveFilename); + + // Create directory if it doesn't exist + const downloadDir = path.dirname(projectSavePath); + if (!fs.existsSync(downloadDir)) { + fs.mkdirSync(downloadDir, { recursive: true }); + } + + // Save the raw CSV data to project folder + fs.writeFileSync(projectSavePath, csvBuffer); + console.log('File saved locally to: ' + projectSavePath); + + // Convert to base64 and output for UI capture + const base64Data = csvBuffer.toString('base64'); + + console.log('DOWNLOAD_RESULT_START'); + console.log('FILENAME:' + projectSaveFilename); + console.log('CONTENT:' + base64Data); + console.log('DOWNLOAD_RESULT_END'); + + // Brief delay before exit to allow UI to process the completion + setTimeout(() => { + process.exit(0); + }, 1000); + }); + + statusResponse.data.on('error', (err) => { + console.log('ERROR: Download error: ' + err.message); + setTimeout(() => { + process.exit(1); + }, 1000); + }); + + // Set a timeout in case the stream never ends + setTimeout(() => { + console.log('ERROR: 
Download timeout'); + process.exit(1); + }, 30000); + + return; + } + + } catch (error) { + if (error.response && error.response.status === 204) { + // Show animated loading indicator + const spinner = loadingChars[(attempts - 1) % loadingChars.length]; + const elapsed = attempts * 3; // 3 seconds per attempt + + if (lastStatus !== 204) { + console.log('Export processing...'); + lastStatus = 204; + } else if (attempts % 5 === 0) { + console.log('Still processing (' + elapsed + 's elapsed)'); + } + } else { + console.log('ERROR: Status check failed: ' + error.message); + if (error.response) { + console.log('ERROR: HTTP Status: ' + error.response.status); + } + } + } + + await new Promise(resolve => setTimeout(resolve, 3000)); + } + + console.log('ERROR: Export timed out after ' + attempts + ' attempts'); + + } catch (error) { + console.log('FATAL ERROR: ' + error.message); + } +} + +exportWorkflow(); \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/export-workbook-element-csv.md b/recipe-portal/recipes/workbooks/export-workbook-element-csv.md new file mode 100644 index 00000000..92848931 --- /dev/null +++ b/recipe-portal/recipes/workbooks/export-workbook-element-csv.md @@ -0,0 +1,62 @@ +# Export Workbook Element CSV + +## IMPORTANT USAGE NOTE: +In order for the data filtering to work, the workbook MUST have a date range page control with an ID of `API-Date-Filter`. + +Date filtering will not work without this! 
+ +## API Endpoints Used + +- `POST /v2/workbooks/{workbookId}/export` → [Export Workbook](https://help.sigmacomputing.com/reference/exportworkbook) +- `GET /v2/query/{queryId}/download` → [Download Exported File](https://help.sigmacomputing.com/reference/downloadquery) + +## Output Location + +- CSV file saved to `recipe-portal/downloaded-files/{filename given}` +- Filename based on workbook and element being exported + +## Expected Output + +- Export job initiation confirmation with date range parameters +- Progress monitoring during export processing +- CSV file download when export completes + +## Use Cases + +- Extract specific table or chart data for analysis +- Generate CSV exports for external systems +- Create data extracts with custom date filtering +- Automate data export workflows + +## Important Notes + +- Exports specific workbook element (table/chart) rather than entire workbook +- Supports date range parameters for filtered exports +- Requires valid WORKBOOK_ID and ELEMENT_ID in environment variables + +## ⚠️ Important Usage Notes + +**This is a learning tool** designed to help you understand common Sigma API patterns. It is **not intended for production use**. + +## Export Limitations & Recommendations + +**Row Limits**: +- **Default**: 100,000 rows (recommended for reliable downloads) +- **Maximum**: 1,000,000 rows (Sigma API limit) +- **Portal Limit**: Single request only - no batching implemented + +**Download Reliability**: +- Large downloads may timeout due to network limitations +- Files >100K rows may experience reliability issues +- For production use, implement proper batching patterns + +**Batching Not Implemented**: This portal demonstrates single-request exports only. 
Production applications should implement batch processing for large datasets using: +- Multiple requests with `rowLimit` and `offset` parameters +- Proper error handling and retry logic +- Progress tracking across multiple API calls + +**For Production Use**: +- Implement proper batching for datasets >100K rows +- Add timeout handling and retry mechanisms +- Use Sigma's web interface for large operational exports +- Consider data consistency implications during multi-request exports \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/export-workbook-pdf.js b/recipe-portal/recipes/workbooks/export-workbook-pdf.js new file mode 100644 index 00000000..1c5cf75f --- /dev/null +++ b/recipe-portal/recipes/workbooks/export-workbook-pdf.js @@ -0,0 +1,180 @@ +// Title: Export Workbook PDF +// Description: This script triggers an export to PDF job with date range parameters, and downloads the export once ready. The export will be the entire workbook in PDF format. +// +// PREREQUISITES: +// - Target workbook must exist and be accessible +// - For date filtering: Workbook must have a page control with ID "API-Date-Filter" +// (Control ID can be found in workbook edit mode > select control > Properties panel) +// - Workbook pages must be compatible with PDF export format + +// Environment variables are provided dynamically by the UI - no .env file needed + +// Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load necessary modules for file handling +const fs = require('fs'); +const path = require('path'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Base URL for the Sigma API +const workbookId = process.env.WORKBOOK_ID; // Workbook ID from which to export data + +async function initiateExport(accessToken) { + const exportOptions 
= { + workbookId: workbookId, + format: { type: 'pdf', layout: 'portrait' }, + }; + + + try { + const response = await axios.post( + baseURL + '/workbooks/' + workbookId + '/export', + exportOptions, + { headers: { Authorization: 'Bearer ' + accessToken, 'Content-Type': 'application/json' } } + ); + + const queryId = response.data.queryId; + if (!queryId) { + console.log('ERROR: No queryId received from export API'); + return null; + } + + // Direct output that bypasses all wrapper issues + process.stderr.write('DIRECT_LOG: Export initiated - QueryID: ' + queryId + '\n'); + process.stderr.write('DIRECT_LOG: QueryID Type: ' + typeof queryId + '\n'); + process.stderr.write('DIRECT_LOG: QueryID Length: ' + (queryId ? queryId.length : 'null') + '\n'); + + console.log('Export initiated - QueryID: ' + queryId); + return queryId; + } catch (error) { + console.log('ERROR: Failed to initiate export: ' + error.message); + return null; + } +} + +async function checkExportReady(queryId, accessToken) { + console.log('Waiting for export to complete...'); + const downloadUrl = baseURL + '/query/' + queryId + '/download'; + + let attempts = 0; + let lastStatus = null; + + while (attempts < 20) { + attempts++; + + try { + const statusResponse = await axios.get(downloadUrl, { + headers: { Authorization: 'Bearer ' + accessToken }, + responseType: 'stream' + }); + + if (statusResponse.status === 200) { + console.log('Export ready! Downloading...'); + return statusResponse.data; + } + + } catch (error) { + if (error.response && error.response.status === 204) { + // Show periodic progress updates without spinner complexity + if (lastStatus !== 204) { + console.log('Export processing...'); + lastStatus = 204; + } else if (attempts % 5 === 0) { + console.log('Still processing... 
(' + (attempts * 3) + 's elapsed)'); + } + } else { + console.log('ERROR: Status check failed: ' + error.message); + if (error.response) { + console.log('ERROR: HTTP Status: ' + error.response.status); + } + } + } + + await new Promise(resolve => setTimeout(resolve, 3000)); + } + + console.log('ERROR: Export timed out after ' + attempts + ' attempts'); + return null; +} + +async function exportWorkflow() { + try { + console.log('Authenticating...'); + const accessToken = await getBearerToken(); + if (!accessToken) { + console.log('ERROR: Authentication failed'); + return; + } + console.log('Authentication successful'); + + console.log('Initiating PDF export...'); + const queryId = await initiateExport(accessToken); + if (!queryId) { + console.log('ERROR: Failed to initiate export or obtain queryId'); + return; + } + + const data = await checkExportReady(queryId, accessToken); + if (data) { + // Capture the PDF data as binary chunks + let pdfChunks = []; + data.on('data', (chunk) => { + pdfChunks.push(chunk); + }); + + data.on('end', () => { + // Combine all chunks into a single buffer + const pdfBuffer = Buffer.concat(pdfChunks); + const sizeKB = Math.round(pdfBuffer.length / 1024); + console.log('Downloaded PDF: ' + sizeKB + 'KB'); + + // Save file locally to project folder for verification + const projectSaveFilename = 'export.pdf'; + const projectSavePath = path.join(__dirname, '../../', 'downloaded-files', projectSaveFilename); + + // Create directory if it doesn't exist + const downloadDir = path.dirname(projectSavePath); + if (!fs.existsSync(downloadDir)) { + fs.mkdirSync(downloadDir, { recursive: true }); + } + + // Save the raw PDF data to project folder + fs.writeFileSync(projectSavePath, pdfBuffer); + console.log('File saved locally to: ' + projectSavePath); + + // Convert to base64 and output for UI capture + const base64Data = pdfBuffer.toString('base64'); + const filename = 'export.pdf'; + + console.log('DOWNLOAD_RESULT_START'); + 
console.log('FILENAME:' + filename); + console.log('CONTENT:' + base64Data); + console.log('DOWNLOAD_RESULT_END'); + console.log('Export completed successfully'); + + // Brief delay before exit to allow UI to process the completion + setTimeout(() => { + process.exit(0); + }, 1000); + }); + + data.on('error', (err) => { + console.log('ERROR: Download error: ' + err.message); + setTimeout(() => { + process.exit(1); + }, 1000); + }); + } else { + console.log('ERROR: Failed to prepare the export for download'); + } + + } catch (error) { + console.log('FATAL ERROR: ' + error.message); + } +} + +exportWorkflow(); \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/export-workbook-pdf.md b/recipe-portal/recipes/workbooks/export-workbook-pdf.md new file mode 100644 index 00000000..1c64ba49 --- /dev/null +++ b/recipe-portal/recipes/workbooks/export-workbook-pdf.md @@ -0,0 +1,51 @@ +# Export Workbook PDF + +## API Endpoints Used + +- `POST /v2/workbooks/{workbookId}/export` → [Export Workbook](https://help.sigmacomputing.com/reference/exportworkbook) +- `GET /v2/query/{queryId}/download` → [Download Exported File](https://help.sigmacomputing.com/reference/downloadquery) + +## Output Location + +- **Local File**: Also saved to recipe-portal/downloaded-files/export.pdf + +## Expected Output + +- Export job initiation confirmation +- Progress monitoring during export processing +- PDF file download when export completes + +## Use Cases + +- Generate presentation-ready reports for stakeholders +- Create offline copies of dashboard data +- Schedule automated report distribution +- Archive workbook snapshots + +## Important Notes + +- Export includes entire workbook in portrait layout +- Process may take time depending on workbook complexity +- Requires valid WORKBOOK_ID in environment variables + +## ⚠️ Important Usage Notes + +**This is a learning tool** designed to help you understand common Sigma API patterns. It is **not intended for production use**. 
+ +## Export Limitations & Recommendations + +**File Size Limits**: +- **Recommended**: Workbooks under 10MB for reliable downloads +- **Large Files**: May experience reliability issues or timeout +- **Portal Limit**: Single request only - no batching implemented + +**Download Reliability**: +- Large PDF exports may timeout due to network limitations +- Complex workbooks with many pages may experience reliability issues +- For production use, implement proper timeout handling and retry logic + +**For Production Use**: +- Implement proper error handling and retry mechanisms +- Use Sigma's web interface for large operational exports +- Consider breaking large workbooks into smaller sections +- Add timeout handling for network reliability \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/get-column-names-all-pages b/recipe-portal/recipes/workbooks/get-column-names-all-pages new file mode 100644 index 00000000..2569347e --- /dev/null +++ b/recipe-portal/recipes/workbooks/get-column-names-all-pages @@ -0,0 +1,125 @@ +// get-column-names-all-pages.js + +// Load environment variables and necessary modules +require('dotenv').config({ path: 'sigma-api-recipes/.env' }); +const getBearerToken = require('../get-access-token'); +const axios = require('axios'); + +// Define environment variables +const baseURL = process.env.baseURL; // Base URL for Sigma API +const workbookId = process.env.WORKBOOK_ID; // Single workbook ID from .env + +// Function to fetch the workbook details +async function getWorkbookDetails(workbookId, accessToken) { + const requestURL = `${baseURL}/workbooks/${workbookId}`; + try { + const response = await axios.get(requestURL, { + headers: { Authorization: `Bearer ${accessToken}` } + }); + return response.data.name || "Unnamed Workbook"; // Return the workbook name or fallback + } catch (error) { + console.error(`Error fetching workbook details for ID ${workbookId}:`, error.response ? 
error.response.data : error.message); + throw error; // Stop execution on failure + } +} + +// Function to fetch pages for a workbook +async function getPages(workbookId, accessToken) { + const requestURL = `${baseURL}/workbooks/${workbookId}/pages`; + try { + const response = await axios.get(requestURL, { + headers: { Authorization: `Bearer ${accessToken}` } + }); + return response.data.entries || []; + } catch (error) { + console.error('Error fetching pages:', error.response ? error.response.data : error.message); + throw error; // Stop execution on failure + } +} + +// Function to fetch elements for a page +async function getElements(pageId, accessToken) { + const requestURL = `${baseURL}/workbooks/${workbookId}/pages/${pageId}/elements`; + try { + const response = await axios.get(requestURL, { + headers: { Authorization: `Bearer ${accessToken}` } + }); + return response.data.entries || []; + } catch (error) { + console.error(`Error fetching elements for page ${pageId}:`, error.response ? error.response.data : error.message); + throw error; // Stop execution on failure + } +} + +// Function to fetch columns for a specific element with pagination support +async function getColumns(workbookId, elementId, accessToken) { + const columns = []; + let nextPage = null; + + do { + const requestURL = `${baseURL}/workbooks/${workbookId}/elements/${elementId}/columns?limit=${process.env.LIMIT}${nextPage ? `&page=${nextPage}` : ''}`; + console.log(`Fetching columns for element: ${elementId} (Page: ${nextPage || '1'})`); + + try { + const response = await axios.get(requestURL, { + headers: { Authorization: `Bearer ${accessToken}` } + }); + + // Append retrieved columns to the list + columns.push(...response.data.entries); + + // Update nextPage token from the response, if present + nextPage = response.data.nextPage || null; + } catch (error) { + console.error(`Error fetching columns for element ${elementId}:`, error.response ? 
error.response.data : error.message); + throw error; // Stop execution on failure + } + } while (nextPage); + + return columns; +} + +// Main function to process the workbook +async function fetchColumnsForWorkbook() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + try { + // Fetch the workbook name dynamically + const workbookName = await getWorkbookDetails(workbookId, accessToken); + + console.log(`Processing workbook: ${workbookId}`); + console.log(`Workbook: ${workbookName}`); + + const pages = await getPages(workbookId, accessToken); + console.log(`Page Count: ${pages.length}\n`); + + for (const page of pages) { + console.log(`Page Name: ${page.name}`); + const elements = await getElements(page.pageId, accessToken); + + for (const element of elements) { + console.log(` - Element Name: ${element.name || 'Unnamed Element'}`); + console.log(` Element ID: ${element.elementId}`); + + // Fetch columns for this element + const columns = await getColumns(workbookId, element.elementId, accessToken); + + // Log column names and count for this element + console.log(` - Column Count: ${columns.length}`); + console.log(' - Columns:'); + columns.forEach(col => { + console.log(` - ${col.label}`); + }); + } + } + } catch (error) { + console.error(`Error processing workbook ${workbookId}:`, error.response ? error.response.data : error.message); + } +} + +// Execute the main function +fetchColumnsForWorkbook(); \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/get-workbooks-name-url-TABLE.js b/recipe-portal/recipes/workbooks/get-workbooks-name-url-TABLE.js new file mode 100644 index 00000000..822e4e5a --- /dev/null +++ b/recipe-portal/recipes/workbooks/get-workbooks-name-url-TABLE.js @@ -0,0 +1,69 @@ +// Title: Get Workbooks Name, Version and URL +// Description: This script lists all workbook names, URLs, and version numbers in tabular form. 
+// Swagger: https://help.sigmacomputing.com/reference/listworkbooks-1 + +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2: Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL + +// Function to fetch workbooks with pagination support +async function fetchWorkbooksWithPagination(url, accessToken) { + let results = []; + let nextPageToken = ''; // Initialize pagination token + do { + const fullUrl = `${url}${nextPageToken ? `?page=${nextPageToken}` : ''}`; + console.log(`Fetching: ${fullUrl}`); + try { + const response = await axios.get(fullUrl, { + headers: { 'Authorization': `Bearer ${accessToken}` }, + }); + results = [...results, ...response.data.entries]; + nextPageToken = response.data.nextPage; // Update the nextPageToken with the next page value + } catch (error) { + console.error(`Error fetching workbooks: ${error}`); + break; // Exit loop on error + } + } while (nextPageToken); // Continue fetching pages until there's no nextPageToken + return results; +} + +// Main function to list workbooks +async function listWorkbooks() { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + // Fetch all workbooks with pagination support + const workbooks = await fetchWorkbooksWithPagination(`${baseURL}/workbooks`, accessToken); + + if (workbooks.length > 0) { + // Prepare the data for display in a table format + const workbooksForTable = workbooks.map((workbook, index) => ({ + Name: workbook.name, // Use the names directly + URL: workbook.url, // URL for the workbook + Version: workbook.latestVersion // Latest 
version of the workbook + })); + + // Display the prepared data as a table in the console + console.table(workbooksForTable); + } else { + console.log('No workbooks found.'); + } +} + +// Execute the listWorkbooks function if running as the main module +if (require.main === module) { + listWorkbooks(); +} + +module.exports = listWorkbooks; diff --git a/recipe-portal/recipes/workbooks/get-workbooks-name-url-TABLE.md b/recipe-portal/recipes/workbooks/get-workbooks-name-url-TABLE.md new file mode 100644 index 00000000..0d4d2e24 --- /dev/null +++ b/recipe-portal/recipes/workbooks/get-workbooks-name-url-TABLE.md @@ -0,0 +1,25 @@ +# Get Workbooks Name, Version and URL + +## API Endpoints Used + +- `GET /v2/workbooks` → [List Workbooks](https://help.sigmacomputing.com/reference/listworkbooks) + +## Expected Output + +- Tabular format showing workbook names, version numbers and URLs +- Clean, organized display suitable for reports or documentation +- Direct links to each workbook + +## Use Cases + +- Generate workbook directory for users +- Create workbook index for documentation +- Build workbook navigation tools +- Export workbook catalog for external use + +## Important Notes + +- Results formatted in clean table structure +- Includes direct URL links for easy workbook access +- Version numbers help track workbook iterations +- Suitable for both display and export purposes \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/initiate-materialization.js b/recipe-portal/recipes/workbooks/initiate-materialization.js new file mode 100644 index 00000000..ebb23b7b --- /dev/null +++ b/recipe-portal/recipes/workbooks/initiate-materialization.js @@ -0,0 +1,137 @@ +// Title: Initiate Materialization +// Description: This script starts a materialization job for a specified workbook and retrieves its status. 
+ +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2: Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const workbookId = process.env.WORKBOOK_ID; // Workbook ID for materialization +const sheetId = process.env.SHEET_ID; // Optional: specific sheet ID to materialize + +// Function to start a materialization job +async function startMaterialization() { + try { + const accessToken = await getBearerToken(); + if (!accessToken) { + console.error('Failed to obtain Bearer token.'); + return; + } + + // Fetch materialization schedules to get the correct sheet ID + const materializationSchedulesURL = `${baseURL}/workbooks/${workbookId}/materialization-schedules`; + console.log(`URL sent to Sigma: ${materializationSchedulesURL}`); + const response = await axios.get(materializationSchedulesURL, { + headers: { + 'Authorization': `Bearer ${accessToken}` + } + }); + + // Parse the sheet ID from the response if schedules are available + const materializationSchedules = response.data.entries || response.data || []; + if (materializationSchedules.length === 0) { + console.error('No materialization schedules found for the specified workbook.'); + return; + } + + console.log(`Found ${materializationSchedules.length} materialization schedule(s):`); + materializationSchedules.forEach((schedule, index) => { + console.log(` ${index}: "${schedule.elementName}" (${schedule.schedule.cronSpec} ${schedule.schedule.timezone}${schedule.paused ? 
' - PAUSED' : ''})`); + }); + + // Determine which schedule to use + let selectedSchedule; + + if (sheetId) { + // Find schedule by sheet ID + selectedSchedule = materializationSchedules.find(schedule => + schedule.sheetId === sheetId + ); + if (!selectedSchedule) { + console.error(`No materialization schedule found for sheet ID: "${sheetId}"`); + console.error('Available element names:', materializationSchedules.map(s => `"${s.elementName}"`).join(', ')); + console.error('Available sheet IDs:', materializationSchedules.map(s => s.sheetId).join(', ')); + return; + } + console.log(`Selected schedule by sheet ID: "${selectedSchedule.elementName}" (${selectedSchedule.sheetId})`); + } else if (materializationSchedules.length === 1) { + // Only one schedule, use it + selectedSchedule = materializationSchedules[0]; + console.log(`Using the only available schedule: "${selectedSchedule.elementName}"`); + } else { + // Multiple schedules, use the first one but warn user + selectedSchedule = materializationSchedules[0]; + console.log(`Multiple schedules available. 
Using first one: "${selectedSchedule.elementName}"`); + console.log('Tip: Use SHEET_ID to specify which schedule to run.'); + } + + const targetSheetId = selectedSchedule.sheetId; + console.log(`Starting materialization job for workbook ${workbookId}, element "${selectedSchedule.elementName}", sheet ${targetSheetId}...`); + + const materializationsURL = `${baseURL}/workbooks/${workbookId}/materializations`; + console.log(`URL sent to Sigma: ${materializationsURL}`); + const startResponse = await axios.post(materializationsURL, { + sheetId: targetSheetId + }, { + headers: { + 'Authorization': `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + }); + + console.log('Materialization job started successfully:', startResponse.data); + + // Retrieve materialization status + const materializationId = startResponse.data.materializationId; // Correctly extract the materialization ID + await checkMaterializationStatus(materializationId, accessToken, materializationsURL); + } catch (error) { + console.error('Error starting materialization job:', error.response ? 
error.response.data : error); + } +} + +// Function to check materialization status +async function checkMaterializationStatus(materializationId, accessToken, materializationsURL) { + try { + const materializationStatusURL = `${materializationsURL}/${materializationId}`; + console.log(`URL sent to Sigma: ${materializationStatusURL}`); + console.log(`Checking materialization status for materialization ID: ${materializationId}`); + + const response = await axios.get(materializationStatusURL, { + headers: { + 'Authorization': `Bearer ${accessToken}` + } + }); + + console.log('Materialization status:', response.data.status); + + // Check if the materialization is complete (ready or failed) + if (response.data.status === 'ready') { + console.log('Materialization job completed successfully.'); + return; // Stop the script execution + } + + if (response.data.status === 'failed' || response.data.status === 'FAILED') { + console.log('Materialization job failed.'); + return; // Stop the script execution + } + + // Continue polling if still in progress + if (response.data.status === 'pending' || response.data.status === 'building') { + await new Promise(resolve => setTimeout(resolve, 5000)); // Wait for 5 seconds + await checkMaterializationStatus(materializationId, accessToken, materializationsURL); + } else { + console.log(`Unexpected materialization status: ${response.data.status}`); + } + } catch (error) { + console.error('Error checking materialization status:', error.response ? 
error.response.data : error); + } +} + +// Execute the script +startMaterialization(); diff --git a/recipe-portal/recipes/workbooks/initiate-materialization.md b/recipe-portal/recipes/workbooks/initiate-materialization.md new file mode 100644 index 00000000..bd3afcc8 --- /dev/null +++ b/recipe-portal/recipes/workbooks/initiate-materialization.md @@ -0,0 +1,35 @@ +# Initiate Materialization + +## API Endpoints Used + +- `GET /v2/workbooks/{workbookId}/materialization-schedules` → List available schedules +- `POST /v2/workbooks/{workbookId}/materializations` → [Create Materialization](https://help.sigmacomputing.com/reference/creatematerialization) +- `GET /v2/workbooks/{workbookId}/materializations/{materializationId}` → [Get Materialization Status](https://help.sigmacomputing.com/reference/getmaterializationstatus) + +## Required Setup + +1. **Authentication**: Valid credentials with workbook access permissions +2. **Materialization Schedule**: Workbook must have at least one materialization schedule configured +3. 
**Schedule Selection**: Use the "Schedule Name" dropdown to select which materialization to run + +## Expected Output + +- List of all available materialization schedules with timing details +- Selected schedule confirmation and job initiation +- Real-time status monitoring (`pending` ā `building` ā `ready`) +- Final completion message with job performance details + +## Use Cases + +- Pre-compute expensive workbook calculations for faster loading +- Run on-demand materializations outside of scheduled times +- Test materialization performance for different workbook elements +- Refresh materialized views before important presentations or reports + +## Important Notes + +- **UI Interface**: Recipe Portal displays user-friendly schedule names but sends sheet IDs to the API +- **Execution Time**: Jobs typically complete in 1-3 minutes with 5-minute timeout +- **Log Display**: All execution logs shown after completion (not real-time streaming) +- **Status Flow**: Monitor progress through `pending` ā `building` ā `ready` (successful) or `failed` +- **Schedule Availability**: Only non-paused schedules appear in dropdown options \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/list-all.js b/recipe-portal/recipes/workbooks/list-all.js new file mode 100644 index 00000000..c0819b53 --- /dev/null +++ b/recipe-portal/recipes/workbooks/list-all.js @@ -0,0 +1,121 @@ +// Title: List All Workbooks with Details +// Description: This script lists all workbooks, returning name, URL, URL ID, and version. 
+// +// PREREQUISITES: +// - Valid authentication credentials required +// - User must have appropriate permissions to list workbooks +// - For large organizations: Results are paginated, use LIMIT and MAX_PAGES parameters to control output + +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: '../../../sigma-api-recipes/.env' }); + +// 2: Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load necessary modules for file handling +const fs = require('fs'); +const path = require('path'); + +// 5: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const limit = parseInt(process.env.LIMIT) || 100000; // Maximum workbooks to retrieve (Default: 100,000 | Max: 1,000,000) +const maxLimit = 1000000; + +// Define an asynchronous function to fetch workbooks with pagination +async function fetchWorkbooksWithPagination(url, accessToken) { + // Validate row limit + if (limit > maxLimit) { + actualLimit = maxLimit; + } else { + actualLimit = limit; + } + + let results = []; + let nextPageToken = ''; // Initialize pagination token + let pageNum = 0; + do { + pageNum++; + const fullUrl = url + (nextPageToken ? 
'?page=' + nextPageToken : ''); + console.log('Fetching page ' + pageNum + '...'); + try { + const response = await axios.get(fullUrl, { + headers: { 'Authorization': 'Bearer ' + accessToken }, + }); + results = [...results, ...response.data.entries]; + + console.log('Found ' + response.data.entries.length + ' workbooks on page ' + pageNum); + + // Check if we've reached the limit + if (results.length >= actualLimit) { + results = results.slice(0, actualLimit); + break; + } + + nextPageToken = response.data.nextPage; // Update the nextPageToken with the next page value + } catch (error) { + console.log('Error fetching workbooks: ' + error.message); + break; // Exit loop on error + } + } while (nextPageToken); // Continue fetching pages until there's no nextPageToken + return results; +} + +// Define an asynchronous function to list workbooks +async function listWorkbooks() { + try { + console.log('Authenticating...'); + const accessToken = await getBearerToken(); + if (!accessToken) { + console.log('ERROR: Authentication failed'); + return; + } + console.log('Authentication successful'); + + console.log('Fetching workbooks (limit: ' + limit + ')...'); + // Fetch all workbooks with pagination + const workbooks = await fetchWorkbooksWithPagination(`${baseURL}/workbooks`, accessToken); + + if (workbooks.length > 0) { + console.log('Found ' + workbooks.length + ' workbooks:'); + console.log(''); + + // Display detailed information for each workbook in a clean format + workbooks.forEach((workbook, index) => { + console.log('=== Workbook #' + (index + 1) + ' ==='); + console.log('Name: ' + (workbook.name || 'Unknown')); + console.log('URL: ' + (workbook.url || 'No URL')); + console.log('URL ID: ' + (workbook.workbookUrlId || 'No URL ID')); + console.log('Path: ' + (workbook.path || 'No path')); + console.log('Latest Version: ' + (workbook.latestVersion || 'Unknown')); + console.log('Created: ' + (workbook.createdAt ? 
new Date(workbook.createdAt).toLocaleDateString() : 'Unknown')); + console.log('Updated: ' + (workbook.updatedAt ? new Date(workbook.updatedAt).toLocaleDateString() : 'Unknown')); + console.log('Owner ID: ' + (workbook.ownerId || 'Unknown')); + console.log(''); + }); + + console.log('=== SUMMARY ==='); + console.log('Total Workbooks: ' + workbooks.length); + console.log('Export completed successfully'); + + // Brief delay before exit to allow UI to process the completion + setTimeout(() => { + process.exit(0); + }, 2000); + } else { + console.log('No workbooks found.'); + } + } catch (error) { + console.log('FATAL ERROR: ' + error.message); + } +} + +// Execute the function to list workbooks if this script is run directly +if (require.main === module) { + listWorkbooks(); +} + +// Export the listWorkbooks function for reuse in other modules +module.exports = listWorkbooks; \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/list-all.md b/recipe-portal/recipes/workbooks/list-all.md new file mode 100644 index 00000000..e7d83b1e --- /dev/null +++ b/recipe-portal/recipes/workbooks/list-all.md @@ -0,0 +1,52 @@ +# List All Workbooks with Details + +## API Endpoints Used + +- `GET /v2/workbooks` ā [List Workbooks](https://help.sigmacomputing.com/reference/listworkbooks) + +## Expected Output + +- **Console Response Display**: Clean, structured workbook information in Response tab +- **Workbook Details**: Name, URL, URL ID, path, version, creation/update dates, owner ID +- **Pagination Support**: Handles multiple pages automatically +- **Summary**: Total count of workbooks retrieved + +## Use Cases + +- Generate complete workbook inventory +- Audit all analytical content in organization +- Export workbook metadata for reporting +- Get workbook IDs for other automation scripts + +## Important Notes + +- Returns all workbooks visible to the authenticated user +- Includes both active and archived workbooks +- URL ID can be used to construct direct 
workbook links + +## ā ļø Important Usage Notes + +**This is a learning tool** designed to help you understand common Sigma API patterns. It is **not intended for production use**. + +## Export Limitations & Recommendations + +**Row Limits**: +- **Default**: 100,000 workbooks (recommended for reliable downloads) +- **Maximum**: 1,000,000 workbooks (Sigma API limit) +- **Portal Limit**: Single request only - no batching implemented + +**Download Reliability**: +- Large downloads may timeout due to network limitations +- Organizations with >100K workbooks may experience reliability issues +- For production use, implement proper batching patterns + +**Batching Not Implemented**: This portal demonstrates single-request exports only. Production applications should implement batch processing for large datasets using: +- Multiple requests with proper pagination handling +- Proper error handling and retry logic +- Progress tracking across multiple API calls + +**For Production Use**: +- Implement proper batching for datasets >100K workbooks +- Add timeout handling and retry mechanisms +- Use proper pagination patterns for large organizations +- Consider data consistency implications during multi-request exports \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/list-workbooks-by-owner.js b/recipe-portal/recipes/workbooks/list-workbooks-by-owner.js new file mode 100644 index 00000000..f7ce8f7f --- /dev/null +++ b/recipe-portal/recipes/workbooks/list-workbooks-by-owner.js @@ -0,0 +1,71 @@ +// Title: List Workbooks by Owner +// Description: This script lists all workbooks owned by a specific member. 
+ +// Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const ownerId = process.env.MEMBERID; // Member ID to filter workbooks by owner + +async function listWorkbooksByOwner() { + const token = await getBearerToken(); + + if (!token) { + console.error("Failed to obtain token, cannot proceed to fetch workbooks."); + return; + } + + if (!ownerId) { + console.error("MEMBERID is required to filter workbooks by owner."); + return; + } + + try { + console.log(`Fetching workbooks owned by member: ${ownerId}`); + + // Construct URL with owner filter only + const url = `${baseURL}/workbooks?ownerId=${ownerId}`; + console.log(`Fetching from: ${url}`); + + const response = await axios.get(url, { + headers: { Authorization: `Bearer ${token}` } + }); + + const workbooks = response.data.entries || []; + console.log(`Found ${workbooks.length} workbooks owned by this member`); + + if (workbooks.length > 0) { + // Display workbooks in table format + const workbooksForTable = workbooks.map((workbook, index) => ({ + "#": index + 1, + Name: workbook.name, + Path: workbook.path || 'N/A', + Created: workbook.createdAt ? new Date(workbook.createdAt).toLocaleDateString() : 'N/A', + Modified: workbook.modifiedAt ? 
new Date(workbook.modifiedAt).toLocaleDateString() : 'N/A', + Version: workbook.latestVersion || 'N/A', + WorkbookId: workbook.workbookId + })); + + console.table(workbooksForTable); + } else { + console.log("No workbooks found for this owner."); + } + + console.log(`\\nš Summary: Found ${workbooks.length} workbooks owned by member ${ownerId}`); + + } catch (error) { + console.error('Error fetching workbooks:', error.response ? error.response.data : error.message); + } +} + +// Execute the function +listWorkbooksByOwner().catch(error => { + console.error('Failed to fetch workbooks:', error); +}); \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/list-workbooks-by-owner.md b/recipe-portal/recipes/workbooks/list-workbooks-by-owner.md new file mode 100644 index 00000000..0ca890f9 --- /dev/null +++ b/recipe-portal/recipes/workbooks/list-workbooks-by-owner.md @@ -0,0 +1,30 @@ +# List Workbooks by Owner + +## API Endpoints Used + +- `GET /v2/workbooks?ownerId={memberId}` ā [List Workbooks](https://help.sigmacomputing.com/reference/listworkbooks) + +## Expected Output + +- Table display of all workbooks owned by the specified member +- Workbook details including name, path, creation date, modification date, and version +- Summary count of total workbooks found + +## Parameters + +- **MEMBERID**: ID of the member whose workbooks to list + +## Use Cases + +- Audit workbooks owned by specific users +- Inventory management for user offboarding +- Track content creation by team members +- Generate ownership reports for compliance + +## Important Notes + +- Requires valid MEMBERID to specify the workbook owner +- Fetches all workbooks in a single request (no pagination needed) +- Returns comprehensive workbook metadata including dates and versions +- Filters results to show only workbooks owned by the specified member +- Generally returns small result sets since filtered by single owner \ No newline at end of file diff --git 
a/recipe-portal/recipes/workbooks/pagination.js b/recipe-portal/recipes/workbooks/pagination.js new file mode 100644 index 00000000..dd83b36e --- /dev/null +++ b/recipe-portal/recipes/workbooks/pagination.js @@ -0,0 +1,98 @@ +// Title: Pagination +// Description: This script returns latest version, name and path for all workbooks. + +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2: Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const limit = parseInt(process.env.LIMIT) || 100; // Number of results per page (max 1,000) +const maxPages = parseInt(process.env.MAX_PAGES) || 0; // Maximum pages to fetch (0 = all pages) + +async function getAllWorkbooks() { + let hasMore = true; // Initialize hasMore to true for the first request + let nextPage = null; // Start from beginning + let currentPage = 0; // Initialize currentPage counter + let token = await getBearerToken(); // Obtain a bearer token for authentication + + + if (!token) { + console.error("Failed to obtain token, cannot proceed to fetch workbooks."); + return; // Exit the function if token acquisition fails + } + + if (maxPages > 0) { + console.log(`Fetching up to ${maxPages} pages with ${limit} results per page`); + } else { + console.log(`Fetching all pages with ${limit} results per page`); + } + + while (hasMore && (maxPages === 0 || currentPage < maxPages)) { + try { + currentPage++; // Increment the currentPage counter for each iteration + // Construct URL with proper query parameters + // Note: nextPage tokens from Sigma API are already URL-encoded + let url = `${baseURL}/workbooks?limit=${limit}`; + if (nextPage) { + // Use the 
nextPage token as-is since it's already properly encoded + url += `&page=${nextPage}`; + } + + console.log(`Fetching page ${currentPage} from Sigma: ${url}`); // Log the constructed URL before sending the request + + const response = await axios.get(url, { + headers: { Authorization: `Bearer ${token}` } // Authorization header with the bearer token + }); + + // Process current page workbooks for table display + const entries = response.data.entries || []; + const total = response.data.total || 'Unknown'; + + console.log(`\nPage ${currentPage} Results (${entries.length} of ${total} total workbooks):`); + console.log(`Has more pages: ${response.data.hasMore ? 'Yes' : 'No'}`); + if (response.data.nextPage) { + console.log(`Next page token: ${response.data.nextPage}`); + } + + const workbooksForTable = entries.map((workbook, index) => ({ + "#": index + 1, // Sequence number within the current page + Name: workbook.name, // Workbook name + Path: workbook.path, // Workbook path + LatestVersion: workbook.latestVersion // Latest version of the workbook + })); + + console.table(workbooksForTable); // Display the current page workbooks in table format + + // Handle different possible response structures for pagination + const apiHasMore = response.data.hasMore; + const apiNextPage = response.data.nextPage; + + // If hasMore is undefined but nextPage exists, assume there are more pages + // If hasMore is explicitly false, respect that + if (apiHasMore === false) { + hasMore = false; + } else if (apiNextPage) { + hasMore = true; + } else { + hasMore = false; + } + + nextPage = apiNextPage; + + + } catch (error) { + console.error('Error fetching workbooks:', error); + break; // Exit the loop in case of an error + } + } +} + +getAllWorkbooks().catch(error => { + console.error('Failed to fetch workbooks:', error); +}); diff --git a/recipe-portal/recipes/workbooks/pagination.md b/recipe-portal/recipes/workbooks/pagination.md new file mode 100644 index 00000000..0c1b5ac4 --- 
/dev/null +++ b/recipe-portal/recipes/workbooks/pagination.md @@ -0,0 +1,39 @@ +# Pagination + +## API Endpoints Used + +- `GET /v2/workbooks` ā [List Workbooks](https://help.sigmacomputing.com/reference/listworkbooks) + +## Expected Output + +- Paginated workbook results formatted as a table +- Each page shows workbook name, path, and latest version +- Navigation information showing current page, total results, and pagination status +- Next page tokens for continued pagination + +## Parameters + +- **LIMIT**: Number of results per page (max 1,000 per request) +- **MAX_PAGES**: Maximum number of pages to fetch (0 = all pages) + +## Use Cases + +- Handle large workbook datasets efficiently with controlled page sizes +- Build paginated user interfaces for workbook browsing +- Manage memory usage when processing many workbooks +- Create structured reports with controlled data volume +- Process large datasets with memory-efficient pagination + +## Pagination Pattern + +1. **First request**: Include `limit` parameter, no `page` parameter +2. **Subsequent requests**: Include both `limit` and `page` parameters +3. **Page tokens**: Use `nextPage` value from response for next request +4. **Completion**: Stop when `hasMore` is false or no `nextPage` token + +## Important Notes + +- Uses Sigma's standard pagination pattern with `limit` and `page` parameters +- Page tokens are handled automatically by the script +- Maximum of 1,000 results per page enforced by Sigma API +- Results include pagination metadata displayed in console output \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/shared-with-memberId.js b/recipe-portal/recipes/workbooks/shared-with-memberId.js new file mode 100644 index 00000000..9d1e8988 --- /dev/null +++ b/recipe-portal/recipes/workbooks/shared-with-memberId.js @@ -0,0 +1,76 @@ +// Title: Shared with Member ID +// Description: This script lists all workbook names, URLs, and version numbers for the specified memberId. 
+ +// 1: Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// 2: Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// 3: Import Axios for making HTTP requests +const axios = require('axios'); + +// 4: Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Your base URL +const memberId = process.env.MEMBERID; // Retrieve the memberId from .env + +// Function to fetch all items with limit = 500 +async function fetchMemberFiles(memberId, accessToken) { + try { + const fullUrl = `${baseURL}/members/${memberId}/files?typeFilters=workbook&limit=500`; + console.log(`Fetching member files: ${fullUrl}`); + const response = await axios.get(fullUrl, { + headers: { + 'Authorization': `Bearer ${accessToken}`, + }, + }); + + return response.data.entries; + } catch (error) { + console.error('Error fetching member files:', error); + return []; + } +} + +// Main function to list workbooks accessible to a specific member +async function listAccessibleWorkbooks() { + const accessToken = await getBearerToken(); + if (!accessToken) return; + + try { + // Fetching all URL IDs from the member's files + const memberFiles = await fetchMemberFiles(memberId, accessToken); + const memberFileIds = memberFiles.map(file => file.id); + + // Fetching all workbooks with a manual limit of 200 rows + const workbooksUrl = `${baseURL}/workbooks?limit=200`; + const response = await axios.get(workbooksUrl, { + headers: { + 'Authorization': `Bearer ${accessToken}`, + }, + }); + const allWorkbooks = response.data.entries; + + // Filtering workbooks accessible to the member + const accessibleWorkbooks = allWorkbooks.filter(workbook => { + // Check if the workbookId is in the member's file IDs + return memberFileIds.includes(workbook.workbookId); + }); + + console.log(`Fetched 
${accessibleWorkbooks.length} workbooks.`); + + // Displaying the filtered workbooks + if (accessibleWorkbooks.length > 0) { + accessibleWorkbooks.forEach((workbook, index) => { + console.log(`#${index + 1}: Name: ${workbook.name}, URL: ${workbook.url}, Latest Version: ${workbook.latestVersion}`); + }); + } else { + console.log('No matching workbooks found for this member based on workbookIds.'); + } + } catch (error) { + console.error('Error listing accessible workbooks:', error); + } +} + +// Execute the function to list accessible workbooks +listAccessibleWorkbooks(); \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/shared-with-memberId.md b/recipe-portal/recipes/workbooks/shared-with-memberId.md new file mode 100644 index 00000000..aa42c143 --- /dev/null +++ b/recipe-portal/recipes/workbooks/shared-with-memberId.md @@ -0,0 +1,25 @@ +# Shared with Member ID + +## API Endpoints Used + +- `GET /v2/workbooks` ā [List Workbooks](https://help.sigmacomputing.com/reference/listworkbooks) + +## Expected Output + +- List of workbook names, URLs, and version numbers +- Filtered to show only workbooks shared with the specified member +- Tabular format for easy reading and analysis + +## Use Cases + +- Audit content shared with specific users +- Generate personalized workbook lists for users +- Track workbook access and sharing patterns +- Create user-specific content inventories + +## Important Notes + +- Results filtered by MEMBERID in environment variables +- Shows both directly shared and team-shared workbooks +- Includes workbook version information for tracking changes +- Useful for access auditing and compliance reporting \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/test-export b/recipe-portal/recipes/workbooks/test-export new file mode 100644 index 00000000..1f1b3737 --- /dev/null +++ b/recipe-portal/recipes/workbooks/test-export @@ -0,0 +1,25 @@ +Product Line,Sum of Order Number +4K Ultra,61217034632 
+AT&T,42951117924 +Accessories,66423219497 +Arts & Crafts,101042124341 +Bluetooth Speakers,52956341726 +Collectibles,27534431243 +Curved TV,51444670642 +Desktops,73123415569 +Headphones,100262456287 +LED TV,74308521453 +Laptops,45698009659 +Lenses,92723194824 +Memory Cards,72778330055 +Monitors,48647034257 +Musical Instrument & Orchestra Accessories,112747824009 +Musical Instruments,19627764581 +Party Supplies,370490541 +Point & Shoot,46488392310 +Printers,68842076582 +SLR Cameras,56505635100 +Smart TV,76368792266 +Sprint,86041580509 +Verizon,84708895225 +iPods,71756567342 \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/test-export.pdf b/recipe-portal/recipes/workbooks/test-export.pdf new file mode 100644 index 00000000..255aaa1b Binary files /dev/null and b/recipe-portal/recipes/workbooks/test-export.pdf differ diff --git a/recipe-portal/recipes/workbooks/test.temp b/recipe-portal/recipes/workbooks/test.temp new file mode 100644 index 00000000..f8b8c9a2 --- /dev/null +++ b/recipe-portal/recipes/workbooks/test.temp @@ -0,0 +1,135 @@ +Aisle Id,Aisle Name +1,prepared soups salads +2,specialty cheeses +3,energy granola bars +4,instant foods +5,marinades meat preparation +6,other +7,packaged meat +8,bakery desserts +9,pasta sauce +10,kitchen supplies +11,cold flu allergy +12,fresh pasta +13,prepared meals +14,tofu meat alternatives +15,packaged seafood +16,fresh herbs +17,baking ingredients +18,bulk dried fruits vegetables +19,oils vinegars +20,oral hygiene +21,packaged cheese +22,hair care +23,popcorn jerky +24,fresh fruits +25,soap +26,coffee +27,beers coolers +28,red wines +29,honeys syrups nectars +30,latino foods +31,refrigerated +32,packaged produce +33,kosher foods +34,frozen meat seafood +35,poultry counter +36,butter +37,ice cream ice +38,frozen meals +39,seafood counter +40,dog food care +41,cat food care +42,frozen vegan vegetarian +43,buns rolls +44,eye ear care +45,candy chocolate +46,mint gum +47,vitamins supplements +48,breakfast 
bars pastries +49,packaged poultry +50,fruit vegetable snacks +51,preserved dips spreads +52,frozen breakfast +53,cream +54,paper goods +55,shave needs +56,diapers wipes +57,granola +58,frozen breads doughs +59,canned meals beans +60,trash bags liners +61,cookies cakes +62,white wines +63,grains rice dried goods +64,energy sports drinks +65,protein meal replacements +66,asian foods +67,fresh dips tapenades +68,bulk grains rice dried goods +69,soup broth bouillon +70,digestion +71,refrigerated pudding desserts +72,condiments +73,facial care +74,dish detergents +75,laundry +76,indian foods +77,soft drinks +78,crackers +79,frozen pizza +80,deodorants +81,canned jarred vegetables +82,baby accessories +83,fresh vegetables +84,milk +85,food storage +86,eggs +87,more household +88,spreads +89,salad dressing toppings +90,cocoa drink mixes +91,soy lactosefree +92,baby food formula +93,breakfast bakery +94,tea +95,canned meat seafood +96,lunch meat +97,baking supplies decor +98,juice nectars +99,canned fruit applesauce +100,missing +101,air fresheners candles +102,baby bath body care +103,ice cream toppings +104,spices seasonings +105,doughs gelatins bake mixes +106,hot dogs bacon sausage +107,chips pretzels +108,other creams cheeses +109,skin care +110,pickled goods olives +111,plates bowls cups flatware +112,bread +113,frozen juice +114,cleaning products +115,water seltzer sparkling water +116,frozen produce +117,nuts seeds dried fruit +118,first aid +119,frozen dessert +120,yogurt +121,cereal +122,meat counter +123,packaged vegetables fruits +124,spirits +125,trail mix snack mix +126,feminine care +127,body lotions soap +128,tortillas flat bread +129,frozen appetizers sides +130,hot cereal pancake mixes +131,dry pasta +132,beauty +133,muscles joints pain relief +134,specialty wines champagnes \ No newline at end of file diff --git a/recipe-portal/recipes/workbooks/update-owner.js b/recipe-portal/recipes/workbooks/update-owner.js new file mode 100644 index 
00000000..404efb16 --- /dev/null +++ b/recipe-portal/recipes/workbooks/update-owner.js @@ -0,0 +1,44 @@ +// Title: Update Owner +// Description: This script updates the ownership of a specified inode in Sigma using the "Update an inode" endpoint. + +// Load environment variables from a specific .env file for configuration +require('dotenv').config({ path: 'recipes/.env' }); + +// Import the function to obtain a bearer token from the authenticate-bearer module +const getBearerToken = require('../get-access-token'); + +// Import Axios for making HTTP requests +const axios = require('axios'); + +// Load use-case specific variables from environment variables +const baseURL = process.env.baseURL; // Base URL for the Sigma API +const memberid = process.env.MEMBERID; // New member ID to assign to the inode as owner +const workbookId = process.env.WORKBOOK_ID; // Workbook ID to be used as the urlId for the inode + +// Function to update the ownerId of a specified inode using the workbookId +async function updateInodeOwner(workbookId) { + const token = await getBearerToken(); + if (!token) { + console.error('Failed to obtain Bearer token.'); + return; + } + + try { + const response = await axios.patch( + `${baseURL}/files/${workbookId}`, + { ownerId: memberid }, // Use `ownerId` with `memberid` value from .env + { + headers: { + 'Authorization': `Bearer ${token}`, + 'Content-Type': 'application/json' + } + } + ); + console.log('Inode updated successfully:', response.data); + } catch (error) { + console.error('Error updating inode:', error.response ? 
error.response.data : error); + } +} + +// Execute the update function +updateInodeOwner(workbookId); diff --git a/recipe-portal/recipes/workbooks/update-owner.md b/recipe-portal/recipes/workbooks/update-owner.md new file mode 100644 index 00000000..bdacc299 --- /dev/null +++ b/recipe-portal/recipes/workbooks/update-owner.md @@ -0,0 +1,25 @@ +# Update Owner + +## API Endpoints Used + +- `PATCH /v2/files/{inodeId}` ā [Update Inode](https://help.sigmacomputing.com/reference/updateinode) + +## Expected Output + +- Confirmation of successful ownership transfer +- Updated workbook details showing new owner information +- Previous and current owner comparison + +## Use Cases + +- Transfer workbook ownership when employees change roles +- Reassign content ownership during organizational changes +- Manage workbook ownership for compliance and governance +- Consolidate content ownership for better management + +## Important Notes + +- Requires valid MEMBERID (new owner) and WORKBOOK_ID in environment variables +- Ownership change affects permissions and administrative control +- Original owner may lose access depending on sharing settings +- Consider sharing permissions before transferring ownership \ No newline at end of file diff --git a/recipe-portal/tailwind.config.js b/recipe-portal/tailwind.config.js new file mode 100644 index 00000000..47bc0bad --- /dev/null +++ b/recipe-portal/tailwind.config.js @@ -0,0 +1,12 @@ +/** @type {import('tailwindcss').Config} */ +module.exports = { + content: [ + './pages/**/*.{js,ts,jsx,tsx,mdx}', + './components/**/*.{js,ts,jsx,tsx,mdx}', + './app/**/*.{js,ts,jsx,tsx,mdx}', + ], + theme: { + extend: {}, + }, + plugins: [], +} \ No newline at end of file diff --git a/recipe-portal/tsconfig.json b/recipe-portal/tsconfig.json new file mode 100644 index 00000000..46512d54 --- /dev/null +++ b/recipe-portal/tsconfig.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "target": "es5", + "lib": ["dom", "dom.iterable", "es6"], + "allowJs": true, + 
"skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "baseUrl": ".", + "paths": { + "@/*": ["./*"] + } + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], + "exclude": ["node_modules"] +} \ No newline at end of file