diff --git a/auto-detect-newman.html b/auto-detect-newman.html
index 027381d..814042a 100644
--- a/auto-detect-newman.html
+++ b/auto-detect-newman.html
@@ -364,7 +364,7 @@
- Timestamp: 9/15/2025, 4:45:45 PM
+ Timestamp: 9/16/2025, 5:55:19 PM
API Spec: Test API
Postman Collection: Test Newman Collection
@@ -441,7 +441,7 @@
Swagger Coverage Report
hljs.highlightAll();
// coverageData from server
- let coverageData = [{"method":"GET","path":"/users","name":"getUsers","statusCode":"200","tags":[],"expectedStatusCodes":["200"],"apiName":"Test API","sourceFile":"test-api.yaml","unmatched":false,"matchedRequests":[{"name":"Get Users","rawUrl":"https://api.example.com/users","method":"GET","testedStatusCodes":["200"],"testScripts":"// Status code is 200"}]},{"method":"POST","path":"/users","name":"createUser","statusCode":"201","tags":[],"expectedStatusCodes":["201","400"],"apiName":"Test API","sourceFile":"test-api.yaml","unmatched":false,"matchedRequests":[{"name":"Create User","rawUrl":"https://api.example.com/users","method":"POST","testedStatusCodes":["201"],"testScripts":"// Status code is 201"}]},{"method":"POST","path":"/users","name":"createUser","statusCode":"400","tags":[],"expectedStatusCodes":["201","400"],"apiName":"Test API","sourceFile":"test-api.yaml","unmatched":true,"matchedRequests":[]},{"method":"GET","path":"/users/{id}","name":"getUserById","statusCode":"200","tags":[],"expectedStatusCodes":["200","404"],"apiName":"Test API","sourceFile":"test-api.yaml","unmatched":true,"matchedRequests":[]},{"method":"GET","path":"/users/{id}","name":"getUserById","statusCode":"404","tags":[],"expectedStatusCodes":["200","404"],"apiName":"Test API","sourceFile":"test-api.yaml","unmatched":true,"matchedRequests":[]}];
+ let coverageData = [{"method":"GET","path":"/users","name":"getUsers","statusCode":"200","tags":[],"expectedStatusCodes":["200"],"apiName":"Test API","sourceFile":"test-api.yaml","operationId":"getUsers","unmatched":false,"matchedRequests":[{"name":"Get Users","rawUrl":"https://api.example.com/users","method":"GET","testedStatusCodes":["200"],"testScripts":"// Status code is 200"}]},{"method":"POST","path":"/users","name":"createUser","statusCode":"201","tags":[],"expectedStatusCodes":["201","400"],"apiName":"Test API","sourceFile":"test-api.yaml","operationId":"createUser","unmatched":false,"matchedRequests":[{"name":"Create User","rawUrl":"https://api.example.com/users","method":"POST","testedStatusCodes":["201"],"testScripts":"// Status code is 201"}]},{"method":"POST","path":"/users","name":"createUser","statusCode":"400","tags":[],"expectedStatusCodes":["201","400"],"apiName":"Test API","sourceFile":"test-api.yaml","operationId":"createUser","unmatched":true,"matchedRequests":[]},{"method":"GET","path":"/users/{id}","name":"getUserById","statusCode":"200","tags":[],"expectedStatusCodes":["200","404"],"apiName":"Test API","sourceFile":"test-api.yaml","operationId":"getUserById","unmatched":true,"matchedRequests":[]},{"method":"GET","path":"/users/{id}","name":"getUserById","statusCode":"404","tags":[],"expectedStatusCodes":["200","404"],"apiName":"Test API","sourceFile":"test-api.yaml","operationId":"getUserById","unmatched":true,"matchedRequests":[]}];
let apiCount = 1;
// Merge duplicates for display only
diff --git a/cli.js b/cli.js
index c7379d7..7855b3a 100644
--- a/cli.js
+++ b/cli.js
@@ -11,16 +11,18 @@ const { loadNewmanReport, extractRequestsFromNewman } = require("./lib/newman");
const { matchOperationsDetailed } = require("./lib/match");
const { generateHtmlReport } = require("./lib/report");
const { loadExcelSpec } = require("./lib/excel");
+const { loadAndParseProto, extractOperationsFromProto } = require("./lib/grpc");
+const { loadAndParseGraphQL, extractOperationsFromGraphQL } = require("./lib/graphql");
const program = new Command();
program
.name("swagger-coverage-cli")
.description(
- "CLI tool for comparing OpenAPI/Swagger specifications with a Postman collection or Newman run report, producing an enhanced HTML report"
+ "CLI tool for comparing API specifications (OpenAPI/Swagger, gRPC, GraphQL) with a Postman collection or Newman run report, producing an enhanced HTML report"
)
.version("4.0.0")
- .argument("
", "Path(s) to the Swagger/OpenAPI file(s) (JSON or YAML). Use comma-separated values for multiple files.")
+ .argument("", "Path(s) to the API specification file(s) (OpenAPI/Swagger JSON/YAML, gRPC .proto, GraphQL .graphql/.gql, or CSV). Use comma-separated values for multiple files.")
.argument("", "Path to the Postman collection (JSON) or Newman run report (JSON).")
.option("-v, --verbose", "Show verbose debug info")
.option("--strict-query", "Enable strict validation of query parameters")
@@ -39,24 +41,50 @@ program
let allSpecOperations = [];
let allSpecNames = [];
const excelExtensions = [".xlsx", ".xls", ".csv"];
+ const grpcExtensions = [".proto"];
+ const graphqlExtensions = [".graphql", ".gql"];
- // Process each swagger file
+ // Process each API specification file
for (const swaggerFile of files) {
const ext = path.extname(swaggerFile).toLowerCase();
let specOperations;
let specName;
if (excelExtensions.includes(ext)) {
- // Parse Excel
+ // Parse Excel/CSV
specOperations = loadExcelSpec(swaggerFile);
specName = path.basename(swaggerFile);
+ } else if (grpcExtensions.includes(ext)) {
+ // Parse gRPC proto file
+ const proto = loadAndParseProto(swaggerFile);
+ specName = proto.package || path.basename(swaggerFile, ext);
+ if (verbose) {
+ console.log(
+ "gRPC proto file loaded successfully:",
+ specName,
+ `(${proto.services.length} services)`
+ );
+ }
+ specOperations = extractOperationsFromProto(proto, verbose);
+ } else if (graphqlExtensions.includes(ext)) {
+ // Parse GraphQL schema file
+ const schema = loadAndParseGraphQL(swaggerFile);
+ specName = path.basename(swaggerFile, ext);
+ if (verbose) {
+ console.log(
+ "GraphQL schema loaded successfully:",
+ specName,
+ `(${schema.queries.length + schema.mutations.length + schema.subscriptions.length} operations)`
+ );
+ }
+ specOperations = extractOperationsFromGraphQL(schema, verbose);
} else {
- // Original Swagger flow
+ // Original Swagger/OpenAPI flow
const spec = await loadAndParseSpec(swaggerFile);
specName = spec.info.title;
if (verbose) {
console.log(
- "Specification loaded successfully:",
+ "OpenAPI/Swagger specification loaded successfully:",
specName,
spec.info.version
);
diff --git a/lib/graphql.js b/lib/graphql.js
new file mode 100644
index 0000000..3fa9ad6
--- /dev/null
+++ b/lib/graphql.js
@@ -0,0 +1,206 @@
+// graphql.js
+
+'use strict';
+
+const fs = require('fs');
+const path = require('path');
+
+/**
+ * Load and parse GraphQL schema file (.graphql, .gql)
+ */
+function loadAndParseGraphQL(filePath) {
+ if (!fs.existsSync(filePath)) {
+ throw new Error(`GraphQL schema file not found: ${filePath}`);
+ }
+
+ const content = fs.readFileSync(filePath, 'utf8');
+
+ // Basic validation - check if it looks like a GraphQL schema
+ const hasValidGraphQLKeywords = /\b(type|schema|Query|Mutation|Subscription|input|enum|interface|union)\b/i.test(content);
+ if (!hasValidGraphQLKeywords) {
+ throw new Error('Invalid GraphQL schema format');
+ }
+
+ return parseGraphQLContent(content);
+}
+
+/**
+ * Parse GraphQL schema content and extract types, queries, mutations, subscriptions
+ */
+function parseGraphQLContent(content) {
+ // Remove comments, block-string descriptions ("""..."""), and quoted descriptions
+ const cleanContent = content
+ .replace(/#.*$/gm, '')
+ .replace(/"""[\s\S]*?"""/g, '')
+ .replace(/"[^"]*"/g, '');
+
+ const schema = {
+ queries: [],
+ mutations: [],
+ subscriptions: [],
+ types: []
+ };
+
+ // Extract root schema definition
+ const schemaMatch = cleanContent.match(/schema\s*\{([^}]+)\}/);
+ let rootTypes = {
+ query: 'Query',
+ mutation: 'Mutation',
+ subscription: 'Subscription'
+ };
+
+ if (schemaMatch) {
+ const schemaBody = schemaMatch[1];
+ const queryMatch = schemaBody.match(/query:\s*(\w+)/);
+ const mutationMatch = schemaBody.match(/mutation:\s*(\w+)/);
+ const subscriptionMatch = schemaBody.match(/subscription:\s*(\w+)/);
+
+ if (queryMatch) rootTypes.query = queryMatch[1];
+ if (mutationMatch) rootTypes.mutation = mutationMatch[1];
+ if (subscriptionMatch) rootTypes.subscription = subscriptionMatch[1];
+ }
+
+ // Extract type definitions
+ const typeRegex = /type\s+(\w+)\s*\{([^}]+)\}/g;
+ let typeMatch;
+
+ while ((typeMatch = typeRegex.exec(cleanContent)) !== null) {
+ const typeName = typeMatch[1];
+ const typeBody = typeMatch[2];
+
+ // Extract fields from type
+ const fields = extractFieldsFromType(typeBody);
+
+ const typeInfo = {
+ name: typeName,
+ fields: fields
+ };
+
+ // Categorize based on root types
+ if (typeName === rootTypes.query) {
+ schema.queries = fields;
+ } else if (typeName === rootTypes.mutation) {
+ schema.mutations = fields;
+ } else if (typeName === rootTypes.subscription) {
+ schema.subscriptions = fields;
+ } else {
+ schema.types.push(typeInfo);
+ }
+ }
+
+ return schema;
+}
+
+/**
+ * Extract fields from a GraphQL type definition
+ */
+function extractFieldsFromType(typeBody) {
+ const fields = [];
+
+ // Match field definitions: fieldName(args): ReturnType
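+ // e.g. "user(id: ID!): User" yields name "user", args "(id: ID!)", return type "User"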
+ const fieldRegex = /(\w+)(\([^)]*\))?\s*:\s*([^,\n]+)/g;
+ let fieldMatch;
+
+ while ((fieldMatch = fieldRegex.exec(typeBody)) !== null) {
+ const fieldName = fieldMatch[1];
+ const args = fieldMatch[2] || '';
+ const returnType = fieldMatch[3].trim();
+
+ // Parse arguments if present
+ const parsedArgs = parseArguments(args);
+
+ fields.push({
+ name: fieldName,
+ type: returnType,
+ arguments: parsedArgs
+ });
+ }
+
+ return fields;
+}
+
+/**
+ * Parse GraphQL field arguments
+ */
+function parseArguments(argsString) {
+ if (!argsString || argsString === '()') {
+ return [];
+ }
+
+ const args = [];
+ // Remove parentheses and split by comma
+ const argContent = argsString.slice(1, -1);
+ const argParts = argContent.split(',');
+
+ for (const argPart of argParts) {
+ const trimmed = argPart.trim();
+ if (trimmed) {
+ const colonIndex = trimmed.indexOf(':');
+ if (colonIndex > 0) {
+ const argName = trimmed.substring(0, colonIndex).trim();
+ const argType = trimmed.substring(colonIndex + 1).trim();
+ args.push({
+ name: argName,
+ type: argType
+ });
+ }
+ }
+ }
+
+ return args;
+}
+
+/**
+ * Extract operations from parsed GraphQL schema
+ * Each query, mutation, and subscription becomes an "operation"
+ */
+function extractOperationsFromGraphQL(schema, verbose = false) {
+ const operations = [];
+
+ // Process queries
+ for (const query of schema.queries) {
+ operations.push(createGraphQLOperation(query, 'query'));
+ }
+
+ // Process mutations
+ for (const mutation of schema.mutations) {
+ operations.push(createGraphQLOperation(mutation, 'mutation'));
+ }
+
+ // Process subscriptions
+ for (const subscription of schema.subscriptions) {
+ operations.push(createGraphQLOperation(subscription, 'subscription'));
+ }
+
+ if (verbose) {
+ console.log(`Extracted ${operations.length} GraphQL operations from schema`);
+ }
+
+ return operations;
+}
+
+/**
+ * Create an operation object for a GraphQL field
+ */
+function createGraphQLOperation(field, operationType) {
+ return {
+ method: 'POST', // GraphQL typically uses POST
+ path: '/graphql', // Standard GraphQL endpoint
+ protocol: 'graphql',
+ operationType: operationType, // query, mutation, subscription
+ fieldName: field.name,
+ returnType: field.type,
+ arguments: field.arguments,
+ operationId: `${operationType}_${field.name}`,
+ summary: `GraphQL ${operationType}: ${field.name}`,
+ tags: [operationType, 'GraphQL'],
+ expectedStatusCodes: ['200'], // GraphQL typically returns 200 for both success and errors
+ statusCode: '200' // Default success
+ };
+}
+
+module.exports = {
+ loadAndParseGraphQL,
+ extractOperationsFromGraphQL,
+ parseGraphQLContent
+};
\ No newline at end of file
diff --git a/lib/grpc.js b/lib/grpc.js
new file mode 100644
index 0000000..44c55a8
--- /dev/null
+++ b/lib/grpc.js
@@ -0,0 +1,138 @@
+// grpc.js
+
+'use strict';
+
+const fs = require('fs');
+const path = require('path');
+
+/**
+ * Load and parse gRPC .proto file
+ * For now, we'll use a simple regex-based parser for basic proto3 syntax
+ * In production, you'd want to use a proper protobuf parser like protobufjs
+ */
+function loadAndParseProto(filePath) {
+ if (!fs.existsSync(filePath)) {
+ throw new Error(`Proto file not found: ${filePath}`);
+ }
+
+ const content = fs.readFileSync(filePath, 'utf8');
+
+ // Basic validation - check if it looks like a proto file
+ if (!content.includes('syntax') && !content.includes('service')) {
+ throw new Error('Invalid proto file format');
+ }
+
+ return parseProtoContent(content);
+}
+
+/**
+ * Parse proto file content and extract services and methods
+ */
+function parseProtoContent(content) {
+ const services = [];
+
+ // Remove comments
+ const cleanContent = content
+ .replace(/\/\/.*$/gm, '')
+ .replace(/\/\*[\s\S]*?\*\//g, '');
+
+ // Extract package name
+ const packageMatch = cleanContent.match(/package\s+([a-zA-Z0-9_.]+);/);
+ const packageName = packageMatch ? packageMatch[1] : '';
+
+ // Extract services
+ const serviceRegex = /service\s+(\w+)\s*\{([^}]+)\}/g;
+ let serviceMatch;
+
+ while ((serviceMatch = serviceRegex.exec(cleanContent)) !== null) {
+ const serviceName = serviceMatch[1];
+ const serviceBody = serviceMatch[2];
+
+ // Extract methods from service
+ const methods = extractMethodsFromService(serviceBody, serviceName, packageName);
+
+ services.push({
+ name: serviceName,
+ package: packageName,
+ methods: methods
+ });
+ }
+
+ return {
+ package: packageName,
+ services: services
+ };
+}
+
+/**
+ * Extract RPC methods from service definition
+ */
+function extractMethodsFromService(serviceBody, serviceName, packageName) {
+ const methods = [];
+
+ // Match RPC method definitions
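+ // e.g. "rpc GetUser(GetUserRequest) returns (GetUserResponse);" yields
+ // method "GetUser", request "GetUserRequest", response "GetUserResponse"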
+ const rpcRegex = /rpc\s+(\w+)\s*\(\s*(\w+)\s*\)\s*returns\s*\(\s*(\w+)\s*\)/g;
+ let methodMatch;
+
+ while ((methodMatch = rpcRegex.exec(serviceBody)) !== null) {
+ const methodName = methodMatch[1];
+ const requestType = methodMatch[2];
+ const responseType = methodMatch[3];
+
+ methods.push({
+ name: methodName,
+ service: serviceName,
+ package: packageName,
+ requestType: requestType,
+ responseType: responseType,
+ fullName: `${packageName ? packageName + '.' : ''}${serviceName}/${methodName}`
+ });
+ }
+
+ return methods;
+}
+
+/**
+ * Extract operations from parsed proto definition
+ * Each RPC method becomes an "operation" similar to REST endpoints
+ */
+function extractOperationsFromProto(proto, verbose = false) {
+ const operations = [];
+
+ for (const service of proto.services) {
+ for (const method of service.methods) {
+ // gRPC operations are defined differently than REST
+ // We'll create a structure similar to REST operations for consistency
+ const operation = {
+ method: 'POST', // gRPC typically uses HTTP/2 POST
+ path: `/${method.fullName}`, // gRPC path format: /package.service/method
+ protocol: 'grpc',
+ serviceName: service.name,
+ methodName: method.name,
+ requestType: method.requestType,
+ responseType: method.responseType,
+ fullName: method.fullName,
+ operationId: `${service.name}_${method.name}`,
+ summary: `gRPC method ${method.name} in service ${service.name}`,
+ tags: [service.name, 'gRPC'],
+ // gRPC typically has success (0) and various error codes
+ expectedStatusCodes: ['200'], // HTTP status for successful gRPC calls
+ statusCode: '200' // Default success
+ };
+
+ operations.push(operation);
+ }
+ }
+
+ if (verbose) {
+ console.log(`Extracted ${operations.length} gRPC operations from proto file`);
+ }
+
+ return operations;
+}
+
+module.exports = {
+ loadAndParseProto,
+ extractOperationsFromProto,
+ parseProtoContent
+};
\ No newline at end of file
diff --git a/lib/match.js b/lib/match.js
index 4557360..1e0a674 100644
--- a/lib/match.js
+++ b/lib/match.js
@@ -84,6 +84,13 @@ function matchOperationsDetailed(specOps, postmanReqs, { verbose, strictQuery, s
expectedStatusCodes: specOp.expectedStatusCodes || [],
apiName: specOp.apiName || "",
sourceFile: specOp.sourceFile || "",
+ // Copy additional protocol-specific fields
+ ...(specOp.protocol && { protocol: specOp.protocol }),
+ ...(specOp.operationId && { operationId: specOp.operationId }),
+ ...(specOp.serviceName && { serviceName: specOp.serviceName }),
+ ...(specOp.methodName && { methodName: specOp.methodName }),
+ ...(specOp.operationType && { operationType: specOp.operationType }),
+ ...(specOp.fieldName && { fieldName: specOp.fieldName }),
unmatched: true,
matchedRequests: []
};
@@ -119,6 +126,7 @@ function matchOperationsDetailed(specOps, postmanReqs, { verbose, strictQuery, s
* - Checks method, path, and optional status-code presence in pmReq.testedStatusCodes.
* - If strictQuery is enabled, ensures required query params are present and conform.
* - If strictBody is enabled, ensures requestBody is JSON (if spec says application/json).
+ * - Extended to support gRPC and GraphQL protocols
*
* @param {Object} specOp
* @param {Object} pmReq
@@ -127,6 +135,21 @@ function matchOperationsDetailed(specOps, postmanReqs, { verbose, strictQuery, s
* @returns {boolean} whether pmReq matches specOp
*/
function doesMatch(specOp, pmReq, { strictQuery, strictBody }) {
+ // Handle different protocols
+ if (specOp.protocol === 'grpc') {
+ return matchGrpcRequest(specOp, pmReq, { strictQuery, strictBody });
+ } else if (specOp.protocol === 'graphql') {
+ return matchGraphQLRequest(specOp, pmReq, { strictQuery, strictBody });
+ } else {
+ // Traditional REST/HTTP matching
+ return matchRestRequest(specOp, pmReq, { strictQuery, strictBody });
+ }
+}
+
+/**
+ * Match REST/HTTP requests (original logic)
+ */
+function matchRestRequest(specOp, pmReq, { strictQuery, strictBody }) {
// 1. Method
if (pmReq.method.toLowerCase() !== specOp.method.toLowerCase()) {
return false;
@@ -162,6 +185,109 @@ function doesMatch(specOp, pmReq, { strictQuery, strictBody }) {
return true;
}
+/**
+ * Match gRPC requests
+ */
+function matchGrpcRequest(specOp, pmReq, { strictQuery, strictBody }) {
+ // 1. Method - gRPC typically uses POST
+ if (pmReq.method.toLowerCase() !== 'post') {
+ return false;
+ }
+
+ // 2. Check if URL matches gRPC pattern
+ // gRPC over HTTP/2 or gRPC-Web might use different URL patterns
+ const urlWithoutQuery = pmReq.rawUrl.split('?')[0];
+
+ // Try multiple patterns for gRPC URL matching
+ const patterns = [
+ new RegExp(`/${specOp.serviceName}/${specOp.methodName}$`), // Simple format
+ new RegExp(`/${specOp.fullName}$`), // Full package.service/method
+ new RegExp(`/${specOp.methodName}$`), // Just method name
+ new RegExp(specOp.path.replace(/\//g, '\\/') + '$') // Exact path match
+ ];
+
+ const matchesAnyPattern = patterns.some(pattern => pattern.test(urlWithoutQuery));
+
+ if (!matchesAnyPattern) {
+ return false;
+ }
+
+ // 3. Check if request body contains gRPC-related content
+ if (strictBody && pmReq.body) {
+ // For gRPC, the body might contain protobuf data or JSON representation
+ // This is a basic check - in practice you'd want more sophisticated validation
+ const bodyStr = typeof pmReq.body === 'string' ? pmReq.body : JSON.stringify(pmReq.body);
+
+ // For gRPC, we don't necessarily expect the protobuf message type name in the JSON body
+ // Instead, we can just check if the body is valid JSON (for gRPC-Web) or has content
+ try {
+ if (bodyStr.trim()) {
+ JSON.parse(bodyStr); // Validate it's valid JSON
+ // Additional validation could check if body structure matches expected protobuf fields
+ // For now, we'll accept any valid JSON body for gRPC calls
+ }
+ } catch (e) {
+ // If it's not valid JSON, it might be binary protobuf data, which is also valid
+ // Accept any non-empty body for gRPC
+ if (bodyStr.trim() === '') {
+ return false; // Empty body might not be valid for some gRPC methods
+ }
+ }
+ }
+
+ return true;
+}
+
+/**
+ * Match GraphQL requests
+ */
+function matchGraphQLRequest(specOp, pmReq, { strictQuery, strictBody }) {
+ // 1. Method - GraphQL typically uses POST
+ if (pmReq.method.toLowerCase() !== 'post') {
+ return false;
+ }
+
+ // 2. Path - GraphQL typically uses /graphql endpoint
+ const urlWithoutQuery = pmReq.rawUrl.split('?')[0];
+ if (!urlWithoutQuery.includes('/graphql')) {
+ return false;
+ }
+
+ // 3. Check if request body contains the GraphQL operation
+ if (strictBody && pmReq.body) {
+ const bodyStr = typeof pmReq.body === 'string' ? pmReq.body : JSON.stringify(pmReq.body);
+
+ try {
+ const bodyObj = JSON.parse(bodyStr);
+
+ // GraphQL requests should have a "query" field
+ if (!bodyObj.query) {
+ return false;
+ }
+
+ // Check if the query contains the correct operation type
+ const queryStr = bodyObj.query.toLowerCase();
+ if (!queryStr.includes(specOp.operationType)) {
+ return false;
+ }
+
+ // More specific matching: check if the field name appears in the right context
+ // For queries: look for "query" or field name after "{"
+ // For mutations: look for "mutation" or field name after "{"
+ const fieldPattern = new RegExp(`\\b${specOp.fieldName}\\s*[\\(\\{]`, 'i');
+ if (!fieldPattern.test(queryStr)) {
+ return false;
+ }
+
+ } catch (e) {
+ // If body is not valid JSON, reject the match
+ return false;
+ }
+ }
+
+ return true;
+}
+
/**
* checkQueryParamsStrict:
* - Example approach verifying required query params from specOp.parameters
diff --git a/lib/newman.js b/lib/newman.js
index 8718987..d6aeb67 100644
--- a/lib/newman.js
+++ b/lib/newman.js
@@ -46,13 +46,25 @@ function extractRequestsFromNewman(newmanReport, verbose = false) {
queryParams = request.url.query.map(q => ({ key: q.key, value: q.value }));
}
- // Extract body information
+ // Extract body information - include actual body content for gRPC/GraphQL matching
let bodyInfo = null;
+ let body = null;
if (request.body && request.body.mode) {
bodyInfo = {
mode: request.body.mode,
content: request.body[request.body.mode]
};
+
+ // Extract actual body content
+ if (request.body.mode === 'raw') {
+ body = request.body.raw;
+ } else if (request.body.mode === 'formdata' && request.body.formdata) {
+ body = request.body.formdata;
+ } else if (request.body.mode === 'urlencoded' && request.body.urlencoded) {
+ body = request.body.urlencoded;
+ } else if (request.body[request.body.mode]) {
+ body = request.body[request.body.mode];
+ }
}
// Extract tested status codes from actual response
@@ -84,6 +96,7 @@ function extractRequestsFromNewman(newmanReport, verbose = false) {
rawUrl,
queryParams,
bodyInfo,
+ body, // Add actual body content for gRPC/GraphQL matching
testedStatusCodes,
testScripts: testScripts.trim(),
// Additional Newman-specific data
diff --git a/lib/postman.js b/lib/postman.js
index 6c92b2c..be47524 100644
--- a/lib/postman.js
+++ b/lib/postman.js
@@ -48,13 +48,25 @@ function extractRequestsFromPostman(collection, verbose = false) {
queryParams = req.url.query.map(q => ({ key: q.key, value: q.value }));
}
- // Body
+ // Body - extract more detailed body information for gRPC/GraphQL matching
let bodyInfo = null;
+ let body = null;
if (req.body && req.body.mode) {
bodyInfo = {
mode: req.body.mode,
content: req.body[req.body.mode] // formdata, raw, urlencoded, etc.
};
+
+ // Extract actual body content for matching
+ if (req.body.mode === 'raw') {
+ body = req.body.raw;
+ } else if (req.body.mode === 'formdata' && req.body.formdata) {
+ body = req.body.formdata;
+ } else if (req.body.mode === 'urlencoded' && req.body.urlencoded) {
+ body = req.body.urlencoded;
+ } else if (req.body[req.body.mode]) {
+ body = req.body[req.body.mode];
+ }
}
// Look for status codes in the test scripts
@@ -100,6 +112,7 @@ function extractRequestsFromPostman(collection, verbose = false) {
rawUrl,
queryParams,
bodyInfo,
+ body, // Add actual body content for gRPC/GraphQL matching
testedStatusCodes: Array.from(testedStatusCodes),
testScripts: testScripts.trim() // Include aggregated test scripts
});
diff --git a/package.json b/package.json
index e960b97..c5aa9f8 100644
--- a/package.json
+++ b/package.json
@@ -20,6 +20,9 @@
"keywords": [
"swagger",
"openapi",
+ "grpc",
+ "graphql",
+ "protobuf",
"coverage",
"cli",
"postman",
diff --git a/readme.md b/readme.md
index e866e2b..16df322 100644
--- a/readme.md
+++ b/readme.md
@@ -30,9 +30,13 @@ Check out the [Example!](https://dreamquality.github.io/swagger-coverage-cli)**
## Introduction
-**swagger-coverage-cli** is a tool that helps you **measure how much of your OpenAPI/Swagger-documented API is actually covered by your Postman tests**. It reads inputs from:
+**swagger-coverage-cli** is a tool that helps you **measure how much of your API documentation is actually covered by your Postman tests**. It reads inputs from:
-1. **Single or Multiple OpenAPI/Swagger** specifications (version 2 or 3) in either JSON or YAML format, or **CSV** files containing API documentation.
+1. **Single or Multiple API specifications** in various formats:
+ - **OpenAPI/Swagger** specifications (version 2 or 3) in JSON or YAML format
+ - **gRPC** protocol buffer files (`.proto`)
+ - **GraphQL** schema files (`.graphql`, `.gql`)
+ - **CSV** files containing API documentation
2. A **Postman** collection (JSON) that contains requests and test scripts, **OR** a **Newman run report** (JSON) that contains actual execution results.
The tool supports processing **multiple API specifications in a single run**, making it ideal for organizations managing multiple APIs or microservices. Using this information, the CLI **calculates a unified coverage percentage** and produces a **detailed HTML report** indicating which endpoints and status codes are validated across all APIs, and which are missing tests.
@@ -41,12 +45,13 @@ The tool supports processing **multiple API specifications in a single run**, ma
## Features
-- **Easy to Use**: Simple CLI interface with just two main arguments (the Swagger file and the Postman collection or Newman report).
+- **Easy to Use**: Simple CLI interface with just two main arguments (the API specification and the Postman collection or Newman report).
+- **Multiple Protocols**: Supports REST APIs (OpenAPI/Swagger), gRPC (Protocol Buffers), and GraphQL schemas.
- **Multiple Input Types**: Supports both Postman collections and Newman run reports for maximum flexibility.
-- **Auto-Detection**: Automatically detects Newman report format even without explicit flags.
-- **Multiple API Support**: Process multiple Swagger/OpenAPI specifications in a single run for comprehensive API portfolio management.
+- **Auto-Detection**: Automatically detects Newman report format and API specification types.
+- **Multiple API Support**: Process multiple API specifications in a single run for comprehensive API portfolio management.
- **Unified Reporting**: Generate consolidated reports that show coverage across all APIs while maintaining individual API identification.
-- **Strict Matching (Optional)**: Enforce strict checks for query parameters, request bodies, and more.
+- **Strict Matching (Optional)**: Enforce strict checks for query parameters, request bodies, and protocol-specific request formats.
- **HTML Reports**: Generates `coverage-report.html` that shows which endpoints are covered and which are not.
- **Extensible**: Modular code structure (Node.js) allows customization of matching logic, query parameter checks, status code detection, etc.
- **CSV Support**: Allows API documentation to be provided in a CSV format for flexibility and ease of use.
@@ -367,11 +372,12 @@ If all criteria are satisfied, the operation is **matched** (covered). Otherwise
## Supported File Formats
-**Swagger/OpenAPI/.csv**:
+**API Specifications**:
-- **JSON** or **YAML**
-- **OpenAPI v2 (Swagger 2.0)** or **OpenAPI v3.x**
-- **CSV**: API documentation can be provided in CSV format following the specified structure.
+- **OpenAPI/Swagger**: JSON or YAML format, v2 (Swagger 2.0) or v3.x
+- **gRPC**: Protocol Buffer files (`.proto`)
+- **GraphQL**: Schema files (`.graphql`, `.gql`)
+- **CSV**: API documentation in CSV format following the specified structure
**Postman**:
@@ -412,6 +418,93 @@ The tool supports two types of input for test data:
In addition to traditional OpenAPI/Swagger specifications, **swagger-coverage-cli** supports API documentation provided in a **CSV** format. This allows for a more flexible and easily editable documentation process, especially for teams that prefer spreadsheet-based documentation.
+### gRPC Support
+
+**swagger-coverage-cli** now supports gRPC APIs through Protocol Buffer (`.proto`) files:
+
+#### gRPC Features:
+- **Protocol Buffer Parsing**: Automatically parses `.proto` files to extract service definitions
+- **Service and Method Detection**: Identifies all RPC methods within services
+- **gRPC-Web Compatibility**: Supports both traditional gRPC and gRPC-Web request patterns
+- **Flexible URL Matching**: Matches various gRPC URL patterns including full package paths
+
+#### Example gRPC Usage:
+
+```bash
+swagger-coverage-cli user-service.proto grpc-collection.json --strict-body
+```
+
+**Sample .proto file:**
+```protobuf
+syntax = "proto3";
+package user.v1;
+
+service UserService {
+ rpc GetUser(GetUserRequest) returns (GetUserResponse);
+ rpc CreateUser(CreateUserRequest) returns (CreateUserResponse);
+}
+
+message GetUserRequest {
+ string user_id = 1;
+}
+
+message GetUserResponse {
+ User user = 1;
+}
+```
+
+#### gRPC Postman Integration:
+- Use POST method for gRPC calls
+- URL format: `http://host:port/package.service/method` or `/service/method`
+- Body should contain JSON representation of protobuf data
+- Example: `{"user_id": "123"}` for GetUserRequest
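+
+If you consume swagger-coverage-cli as a Node.js library rather than through the CLI, the gRPC module can be driven directly. A minimal sketch (it assumes a repository checkout so the `./lib` require paths resolve, and mirrors `test/protocol-integration.test.js`):
+
+```js
+const { loadAndParseProto, extractOperationsFromProto } = require("./lib/grpc");
+const { loadPostmanCollection, extractRequestsFromPostman } = require("./lib/postman");
+const { matchOperationsDetailed } = require("./lib/match");
+
+// Parse the .proto file; each RPC method becomes one coverage "operation".
+const proto = loadAndParseProto("test/fixtures/user-service.proto");
+const operations = extractOperationsFromProto(proto);
+
+// Extract requests from the Postman collection and match them against the operations.
+const collection = loadPostmanCollection("test/fixtures/grpc-collection.json");
+const requests = extractRequestsFromPostman(collection);
+const coverage = matchOperationsDetailed(operations, requests, {
+  verbose: false,
+  strictQuery: false,
+  strictBody: true,
+});
+
+console.log(`${coverage.filter((item) => !item.unmatched).length}/${coverage.length} gRPC methods covered`);
+```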
+
+### GraphQL Support
+
+**swagger-coverage-cli** supports GraphQL APIs through schema files (`.graphql` or `.gql`):
+
+#### GraphQL Features:
+- **Schema Parsing**: Automatically parses GraphQL schema files
+- **Operation Detection**: Identifies queries, mutations, and subscriptions
+- **Field-level Matching**: Matches specific GraphQL operations to Postman requests
+- **Argument Validation**: Supports field arguments and input types
+
+#### Example GraphQL Usage:
+
+```bash
+swagger-coverage-cli blog-schema.graphql graphql-collection.json --strict-body
+```
+
+**Sample .graphql file:**
+```graphql
+type Query {
+ user(id: ID!): User
+ users(first: Int, after: String): UserConnection
+}
+
+type Mutation {
+ createUser(input: CreateUserInput!): CreateUserPayload
+ updateUser(id: ID!, input: UpdateUserInput!): UpdateUserPayload
+}
+
+type User {
+ id: ID!
+ name: String!
+ email: String!
+}
+```
+
+#### GraphQL Postman Integration:
+- Use POST method to `/graphql` endpoint
+- Body should contain GraphQL query with variables
+- Example:
+```json
+{
+ "query": "query GetUser($id: ID!) { user(id: $id) { id name email } }",
+ "variables": { "id": "123" }
+}
+```
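+
+As with gRPC, the GraphQL module can be used programmatically. A minimal sketch (same assumption of a repository checkout for the `./lib` paths; compare `test/protocol-integration.test.js`):
+
+```js
+const { loadAndParseGraphQL, extractOperationsFromGraphQL } = require("./lib/graphql");
+const { loadPostmanCollection, extractRequestsFromPostman } = require("./lib/postman");
+const { matchOperationsDetailed } = require("./lib/match");
+
+// Every query, mutation, and subscription in the schema becomes one coverage "operation".
+const schema = loadAndParseGraphQL("test/fixtures/blog-schema.graphql");
+const operations = extractOperationsFromGraphQL(schema);
+
+// Postman requests (POST to /graphql with a "query" in the body) are matched against those operations.
+const requests = extractRequestsFromPostman(loadPostmanCollection("test/fixtures/graphql-collection.json"));
+const coverage = matchOperationsDetailed(operations, requests, {
+  verbose: false,
+  strictQuery: false,
+  strictBody: true,
+});
+
+for (const item of coverage) {
+  console.log(`${item.operationId}: ${item.unmatched ? "not covered" : "covered"}`);
+}
+```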
+
#### CSV Structure
Your CSV file should adhere to the following structure to ensure compatibility with **swagger-coverage-cli**:
diff --git a/test/fixtures/blog-schema.graphql b/test/fixtures/blog-schema.graphql
new file mode 100644
index 0000000..26063cf
--- /dev/null
+++ b/test/fixtures/blog-schema.graphql
@@ -0,0 +1,86 @@
+type Query {
+ user(id: ID!): User
+ users(first: Int, after: String): UserConnection
+ posts(userId: ID!): [Post!]!
+}
+
+type Mutation {
+ createUser(input: CreateUserInput!): CreateUserPayload
+ updateUser(id: ID!, input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(id: ID!): DeleteUserPayload
+ createPost(input: CreatePostInput!): CreatePostPayload
+}
+
+type Subscription {
+ userUpdated(userId: ID!): User
+ postCreated: Post
+}
+
+type User {
+ id: ID!
+ name: String!
+ email: String!
+ posts: [Post!]!
+ createdAt: String!
+}
+
+type Post {
+ id: ID!
+ title: String!
+ content: String!
+ author: User!
+ createdAt: String!
+}
+
+type UserConnection {
+ edges: [UserEdge!]!
+ pageInfo: PageInfo!
+}
+
+type UserEdge {
+ node: User!
+ cursor: String!
+}
+
+type PageInfo {
+ hasNextPage: Boolean!
+ hasPreviousPage: Boolean!
+ startCursor: String
+ endCursor: String
+}
+
+input CreateUserInput {
+ name: String!
+ email: String!
+}
+
+input UpdateUserInput {
+ name: String
+ email: String
+}
+
+input CreatePostInput {
+ title: String!
+ content: String!
+ authorId: ID!
+}
+
+type CreateUserPayload {
+ user: User
+ errors: [String!]
+}
+
+type UpdateUserPayload {
+ user: User
+ errors: [String!]
+}
+
+type DeleteUserPayload {
+ success: Boolean!
+ errors: [String!]
+}
+
+type CreatePostPayload {
+ post: Post
+ errors: [String!]
+}
\ No newline at end of file
diff --git a/test/fixtures/graphql-collection.json b/test/fixtures/graphql-collection.json
new file mode 100644
index 0000000..cdffb94
--- /dev/null
+++ b/test/fixtures/graphql-collection.json
@@ -0,0 +1,78 @@
+{
+ "info": {
+ "name": "GraphQL Test Collection",
+ "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
+ },
+ "item": [
+ {
+ "name": "Get User Query",
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\"query\": \"query GetUser($id: ID!) { user(id: $id) { id name email } }\", \"variables\": {\"id\": \"123\"}}"
+ },
+ "url": {
+ "raw": "http://localhost:4000/graphql",
+ "protocol": "http",
+ "host": ["localhost"],
+ "port": "4000",
+ "path": ["graphql"]
+ }
+ },
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "exec": [
+ "pm.test('Status code is 200', function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ]
+ }
+ }
+ ]
+ },
+ {
+ "name": "Create User Mutation",
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\"query\": \"mutation CreateUser($input: CreateUserInput!) { createUser(input: $input) { user { id name email } errors } }\", \"variables\": {\"input\": {\"name\": \"Jane Doe\", \"email\": \"jane@example.com\"}}}"
+ },
+ "url": {
+ "raw": "http://localhost:4000/graphql",
+ "protocol": "http",
+ "host": ["localhost"],
+ "port": "4000",
+ "path": ["graphql"]
+ }
+ },
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "exec": [
+ "pm.test('Status code is 200', function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ]
+ }
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/test/fixtures/grpc-collection.json b/test/fixtures/grpc-collection.json
new file mode 100644
index 0000000..e082a57
--- /dev/null
+++ b/test/fixtures/grpc-collection.json
@@ -0,0 +1,78 @@
+{
+ "info": {
+ "name": "gRPC Test Collection",
+ "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
+ },
+ "item": [
+ {
+ "name": "Get User",
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "value": "application/grpc"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\"user_id\": \"123\"}"
+ },
+ "url": {
+ "raw": "http://localhost:9090/user.v1.UserService/GetUser",
+ "protocol": "http",
+ "host": ["localhost"],
+ "port": "9090",
+ "path": ["user.v1.UserService", "GetUser"]
+ }
+ },
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "exec": [
+ "pm.test('Status code is 200', function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ]
+ }
+ }
+ ]
+ },
+ {
+ "name": "Create User",
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "value": "application/grpc"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\"name\": \"John Doe\", \"email\": \"john@example.com\"}"
+ },
+ "url": {
+ "raw": "http://localhost:9090/user.v1.UserService/CreateUser",
+ "protocol": "http",
+ "host": ["localhost"],
+ "port": "9090",
+ "path": ["user.v1.UserService", "CreateUser"]
+ }
+ },
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "exec": [
+ "pm.test('Status code is 200', function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ]
+ }
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/test/fixtures/user-service.proto b/test/fixtures/user-service.proto
new file mode 100644
index 0000000..003337c
--- /dev/null
+++ b/test/fixtures/user-service.proto
@@ -0,0 +1,63 @@
+syntax = "proto3";
+
+package user.v1;
+
+service UserService {
+ rpc GetUser(GetUserRequest) returns (GetUserResponse);
+ rpc CreateUser(CreateUserRequest) returns (CreateUserResponse);
+ rpc UpdateUser(UpdateUserRequest) returns (UpdateUserResponse);
+ rpc DeleteUser(DeleteUserRequest) returns (DeleteUserResponse);
+ rpc ListUsers(ListUsersRequest) returns (ListUsersResponse);
+}
+
+message GetUserRequest {
+ string user_id = 1;
+}
+
+message GetUserResponse {
+ User user = 1;
+}
+
+message CreateUserRequest {
+ string name = 1;
+ string email = 2;
+}
+
+message CreateUserResponse {
+ User user = 1;
+}
+
+message UpdateUserRequest {
+ string user_id = 1;
+ string name = 2;
+ string email = 3;
+}
+
+message UpdateUserResponse {
+ User user = 1;
+}
+
+message DeleteUserRequest {
+ string user_id = 1;
+}
+
+message DeleteUserResponse {
+ bool success = 1;
+}
+
+message ListUsersRequest {
+ int32 page_size = 1;
+ string page_token = 2;
+}
+
+message ListUsersResponse {
+ repeated User users = 1;
+ string next_page_token = 2;
+}
+
+message User {
+ string id = 1;
+ string name = 2;
+ string email = 3;
+ int64 created_at = 4;
+}
\ No newline at end of file
diff --git a/test/graphql.test.js b/test/graphql.test.js
new file mode 100644
index 0000000..d194361
--- /dev/null
+++ b/test/graphql.test.js
@@ -0,0 +1,145 @@
+const { loadAndParseGraphQL, extractOperationsFromGraphQL } = require('../lib/graphql');
+const fs = require('fs');
+const path = require('path');
+
+describe('GraphQL Module', () => {
+ test('loadAndParseGraphQL should throw error if file does not exist', () => {
+ expect(() => loadAndParseGraphQL('nonexistent.graphql')).toThrow('GraphQL schema file not found');
+ });
+
+ test('loadAndParseGraphQL should parse GraphQL schema correctly', () => {
+ const schemaPath = path.resolve(__dirname, 'fixtures', 'blog-schema.graphql');
+ const schema = loadAndParseGraphQL(schemaPath);
+
+ expect(schema).toBeDefined();
+ expect(schema.queries).toHaveLength(3);
+ expect(schema.mutations).toHaveLength(4);
+ expect(schema.subscriptions).toHaveLength(2);
+ expect(schema.types.length).toBeGreaterThan(0);
+ });
+
+ test('extractOperationsFromGraphQL should create operations from schema', () => {
+ const schemaPath = path.resolve(__dirname, 'fixtures', 'blog-schema.graphql');
+ const schema = loadAndParseGraphQL(schemaPath);
+ const operations = extractOperationsFromGraphQL(schema);
+
+ expect(operations).toHaveLength(9); // 3 queries + 4 mutations + 2 subscriptions
+
+ // Check query operation
+ const userQuery = operations.find(op => op.fieldName === 'user');
+ expect(userQuery).toBeDefined();
+ expect(userQuery.method).toBe('POST');
+ expect(userQuery.protocol).toBe('graphql');
+ expect(userQuery.operationType).toBe('query');
+ expect(userQuery.path).toBe('/graphql');
+ expect(userQuery.tags).toContain('GraphQL');
+ expect(userQuery.tags).toContain('query');
+ expect(userQuery.arguments).toHaveLength(1);
+ expect(userQuery.arguments[0].name).toBe('id');
+ expect(userQuery.arguments[0].type).toBe('ID!');
+
+ // Check mutation operation
+ const createUserMutation = operations.find(op => op.fieldName === 'createUser');
+ expect(createUserMutation).toBeDefined();
+ expect(createUserMutation.operationType).toBe('mutation');
+ expect(createUserMutation.tags).toContain('mutation');
+
+ // Check subscription operation
+ const userUpdatedSub = operations.find(op => op.fieldName === 'userUpdated');
+ expect(userUpdatedSub).toBeDefined();
+ expect(userUpdatedSub.operationType).toBe('subscription');
+ expect(userUpdatedSub.tags).toContain('subscription');
+ });
+
+ test('should handle simple GraphQL schema', () => {
+ const simpleSchema = `
+type Query {
+ hello: String
+ user(id: ID!): User
+}
+
+type User {
+ id: ID!
+ name: String!
+}
+`;
+
+ // Create temporary schema file
+ const tempPath = path.resolve(__dirname, 'fixtures', 'temp-simple.graphql');
+ fs.writeFileSync(tempPath, simpleSchema);
+
+ try {
+ const schema = loadAndParseGraphQL(tempPath);
+ expect(schema.queries).toHaveLength(2);
+ expect(schema.mutations).toHaveLength(0);
+ expect(schema.subscriptions).toHaveLength(0);
+
+ const operations = extractOperationsFromGraphQL(schema);
+ expect(operations).toHaveLength(2);
+
+ const helloQuery = operations.find(op => op.fieldName === 'hello');
+ expect(helloQuery.arguments).toHaveLength(0);
+
+ const userQuery = operations.find(op => op.fieldName === 'user');
+ expect(userQuery.arguments).toHaveLength(1);
+ } finally {
+ // Clean up
+ if (fs.existsSync(tempPath)) {
+ fs.unlinkSync(tempPath);
+ }
+ }
+ });
+
+ test('should handle schema with custom root types', () => {
+ const customSchema = `
+schema {
+ query: RootQuery
+ mutation: RootMutation
+}
+
+type RootQuery {
+ getUser: User
+}
+
+type RootMutation {
+ createUser: User
+}
+
+type User {
+ id: ID!
+ name: String!
+}
+`;
+
+ // Create temporary schema file
+ const tempPath = path.resolve(__dirname, 'fixtures', 'temp-custom.graphql');
+ fs.writeFileSync(tempPath, customSchema);
+
+ try {
+ const schema = loadAndParseGraphQL(tempPath);
+ expect(schema.queries).toHaveLength(1);
+ expect(schema.mutations).toHaveLength(1);
+ expect(schema.queries[0].name).toBe('getUser');
+ expect(schema.mutations[0].name).toBe('createUser');
+ } finally {
+ // Clean up
+ if (fs.existsSync(tempPath)) {
+ fs.unlinkSync(tempPath);
+ }
+ }
+ });
+
+ test('should throw error for invalid GraphQL file', () => {
+ const invalidPath = path.resolve(__dirname, 'fixtures', 'invalid.graphql');
+ fs.writeFileSync(invalidPath, 'completely invalid content without any graphql keywords');
+
+ try {
+ expect(() => loadAndParseGraphQL(invalidPath)).toThrow('Invalid GraphQL schema format');
+ } finally {
+ // Clean up
+ if (fs.existsSync(invalidPath)) {
+ fs.unlinkSync(invalidPath);
+ }
+ }
+ });
+});
\ No newline at end of file
diff --git a/test/grpc.test.js b/test/grpc.test.js
new file mode 100644
index 0000000..bf2a1d6
--- /dev/null
+++ b/test/grpc.test.js
@@ -0,0 +1,88 @@
+const { loadAndParseProto, extractOperationsFromProto } = require('../lib/grpc');
+const fs = require('fs');
+const path = require('path');
+
+describe('gRPC Module', () => {
+ test('loadAndParseProto should throw error if file does not exist', () => {
+ expect(() => loadAndParseProto('nonexistent.proto')).toThrow('Proto file not found');
+ });
+
+ test('loadAndParseProto should parse proto file correctly', () => {
+ const protoPath = path.resolve(__dirname, 'fixtures', 'user-service.proto');
+ const proto = loadAndParseProto(protoPath);
+
+ expect(proto).toBeDefined();
+ expect(proto.package).toBe('user.v1');
+ expect(proto.services).toHaveLength(1);
+ expect(proto.services[0].name).toBe('UserService');
+ expect(proto.services[0].methods).toHaveLength(5);
+ });
+
+ test('extractOperationsFromProto should create operations from proto', () => {
+ const protoPath = path.resolve(__dirname, 'fixtures', 'user-service.proto');
+ const proto = loadAndParseProto(protoPath);
+ const operations = extractOperationsFromProto(proto);
+
+ expect(operations).toHaveLength(5);
+
+ // Check first operation
+ const getUser = operations.find(op => op.methodName === 'GetUser');
+ expect(getUser).toBeDefined();
+ expect(getUser.method).toBe('POST');
+ expect(getUser.protocol).toBe('grpc');
+ expect(getUser.serviceName).toBe('UserService');
+ expect(getUser.requestType).toBe('GetUserRequest');
+ expect(getUser.responseType).toBe('GetUserResponse');
+ expect(getUser.fullName).toBe('user.v1.UserService/GetUser');
+ expect(getUser.tags).toContain('gRPC');
+ expect(getUser.tags).toContain('UserService');
+ });
+
+ test('should handle proto file without package', () => {
+ const testProtoContent = `
+syntax = "proto3";
+
+service TestService {
+ rpc TestMethod(TestRequest) returns (TestResponse);
+}
+
+message TestRequest {
+ string test = 1;
+}
+
+message TestResponse {
+ string result = 1;
+}
+`;
+
+ // Create temporary proto file
+ const tempPath = path.resolve(__dirname, 'fixtures', 'temp-test.proto');
+ fs.writeFileSync(tempPath, testProtoContent);
+
+ try {
+ const proto = loadAndParseProto(tempPath);
+ expect(proto.package).toBe('');
+ expect(proto.services).toHaveLength(1);
+ expect(proto.services[0].methods[0].fullName).toBe('TestService/TestMethod');
+ } finally {
+ // Clean up
+ if (fs.existsSync(tempPath)) {
+ fs.unlinkSync(tempPath);
+ }
+ }
+ });
+
+ test('should throw error for invalid proto file', () => {
+ const invalidPath = path.resolve(__dirname, 'fixtures', 'invalid.proto');
+ fs.writeFileSync(invalidPath, 'this is not a proto file');
+
+ try {
+ expect(() => loadAndParseProto(invalidPath)).toThrow('Invalid proto file format');
+ } finally {
+ // Clean up
+ if (fs.existsSync(invalidPath)) {
+ fs.unlinkSync(invalidPath);
+ }
+ }
+ });
+});
\ No newline at end of file
diff --git a/test/protocol-integration.test.js b/test/protocol-integration.test.js
new file mode 100644
index 0000000..ffce1fa
--- /dev/null
+++ b/test/protocol-integration.test.js
@@ -0,0 +1,162 @@
+const { loadAndParseProto, extractOperationsFromProto } = require('../lib/grpc');
+const { loadAndParseGraphQL, extractOperationsFromGraphQL } = require('../lib/graphql');
+const { loadPostmanCollection, extractRequestsFromPostman } = require('../lib/postman');
+const { matchOperationsDetailed } = require('../lib/match');
+const path = require('path');
+
+describe('gRPC and GraphQL Integration Tests', () => {
+ describe('gRPC Integration', () => {
+ test('should match gRPC operations with Postman collection', () => {
+ // Load gRPC proto file
+ const protoPath = path.resolve(__dirname, 'fixtures', 'user-service.proto');
+ const proto = loadAndParseProto(protoPath);
+ const grpcOperations = extractOperationsFromProto(proto);
+
+ // Load Postman collection with gRPC requests
+ const collectionPath = path.resolve(__dirname, 'fixtures', 'grpc-collection.json');
+ const collection = loadPostmanCollection(collectionPath);
+ const postmanRequests = extractRequestsFromPostman(collection);
+
+ // Match operations
+ const coverageItems = matchOperationsDetailed(
+ grpcOperations,
+ postmanRequests,
+ { verbose: false, strictQuery: false, strictBody: true }
+ );
+
+ // Verify matches
+ expect(coverageItems).toHaveLength(5); // All gRPC operations
+
+ const matchedItems = coverageItems.filter(item => !item.unmatched);
+ expect(matchedItems).toHaveLength(2); // GetUser and CreateUser should match
+
+ // Check specific matches
+ const getUserMatch = matchedItems.find(item => item.operationId && item.operationId.includes('GetUser'));
+ expect(getUserMatch).toBeDefined();
+ expect(getUserMatch.matchedRequests).toHaveLength(1);
+ expect(getUserMatch.matchedRequests[0].name).toBe('Get User');
+
+ const createUserMatch = matchedItems.find(item => item.operationId && item.operationId.includes('CreateUser'));
+ expect(createUserMatch).toBeDefined();
+ expect(createUserMatch.matchedRequests).toHaveLength(1);
+ expect(createUserMatch.matchedRequests[0].name).toBe('Create User');
+ });
+ });
+
+ describe('GraphQL Integration', () => {
+ test('should match GraphQL operations with Postman collection', () => {
+ // Load GraphQL schema
+ const schemaPath = path.resolve(__dirname, 'fixtures', 'blog-schema.graphql');
+ const schema = loadAndParseGraphQL(schemaPath);
+ const graphqlOperations = extractOperationsFromGraphQL(schema);
+
+ // Load Postman collection with GraphQL requests
+ const collectionPath = path.resolve(__dirname, 'fixtures', 'graphql-collection.json');
+ const collection = loadPostmanCollection(collectionPath);
+ const postmanRequests = extractRequestsFromPostman(collection);
+
+ // Match operations
+ const coverageItems = matchOperationsDetailed(
+ graphqlOperations,
+ postmanRequests,
+ { verbose: false, strictQuery: false, strictBody: true }
+ );
+
+ // Verify matches
+ expect(coverageItems).toHaveLength(9); // All GraphQL operations
+
+ const matchedItems = coverageItems.filter(item => !item.unmatched);
+ expect(matchedItems).toHaveLength(2); // user query and createUser mutation should match
+
+ // Check specific matches
+ const userQueryMatch = matchedItems.find(item => item.fieldName === 'user');
+ expect(userQueryMatch).toBeDefined();
+ expect(userQueryMatch.matchedRequests).toHaveLength(1);
+ expect(userQueryMatch.matchedRequests[0].name).toBe('Get User Query');
+
+ const createUserMutationMatch = matchedItems.find(item => item.fieldName === 'createUser');
+ expect(createUserMutationMatch).toBeDefined();
+ expect(createUserMutationMatch.matchedRequests).toHaveLength(1);
+ expect(createUserMutationMatch.matchedRequests[0].name).toBe('Create User Mutation');
+ });
+ });
+
+ describe('Mixed Protocol Tests', () => {
+ test('should handle mixed REST, gRPC, and GraphQL operations', () => {
+ // Create a mix of operations
+ const restOperation = {
+ method: 'GET',
+ path: '/api/users',
+ protocol: undefined, // REST (default)
+ operationId: 'getUsers',
+ statusCode: '200',
+ expectedStatusCodes: ['200']
+ };
+
+ const grpcOperation = {
+ method: 'POST',
+ path: '/user.v1.UserService/GetUser',
+ protocol: 'grpc',
+ serviceName: 'UserService',
+ methodName: 'GetUser',
+ fullName: 'user.v1.UserService/GetUser',
+ operationId: 'UserService_GetUser',
+ statusCode: '200',
+ expectedStatusCodes: ['200']
+ };
+
+ const graphqlOperation = {
+ method: 'POST',
+ path: '/graphql',
+ protocol: 'graphql',
+ operationType: 'query',
+ fieldName: 'user',
+ operationId: 'query_user',
+ statusCode: '200',
+ expectedStatusCodes: ['200']
+ };
+
+ const mixedOperations = [restOperation, grpcOperation, graphqlOperation];
+
+ // Create corresponding requests
+ const postmanRequests = [
+ {
+ name: 'Get Users REST',
+ method: 'GET',
+ rawUrl: 'http://localhost:3000/api/users',
+ testedStatusCodes: ['200'],
+ queryParams: [],
+ body: null
+ },
+ {
+ name: 'Get User gRPC',
+ method: 'POST',
+ rawUrl: 'http://localhost:9090/user.v1.UserService/GetUser',
+ testedStatusCodes: ['200'],
+ queryParams: [],
+ body: '{"user_id": "123"}'
+ },
+ {
+ name: 'Get User GraphQL',
+ method: 'POST',
+ rawUrl: 'http://localhost:4000/graphql',
+ testedStatusCodes: ['200'],
+ queryParams: [],
+ body: '{"query": "query { user(id: \\"123\\") { id name } }"}'
+ }
+ ];
+
+ // Match operations
+ const coverageItems = matchOperationsDetailed(
+ mixedOperations,
+ postmanRequests,
+ { verbose: false, strictQuery: false, strictBody: true }
+ );
+
+ // All operations should match
+ expect(coverageItems).toHaveLength(3);
+ const matchedItems = coverageItems.filter(item => !item.unmatched);
+ expect(matchedItems).toHaveLength(3);
+ });
+ });
+});
\ No newline at end of file