dengqichen 2025-11-15 13:28:14 +08:00
commit aff0ac1d52
9 changed files with 30022 additions and 0 deletions

8
.gitignore vendored Normal file

@@ -0,0 +1,8 @@
node_modules/
*.log
output/*.sql
output/*.json
.DS_Store
.idea/
*.swp
*.swo

12
CONVERT_NOW.bat Normal file

@@ -0,0 +1,12 @@
@echo off
echo ========================================
echo PostgreSQL to DaMeng Converter
echo ========================================
echo.
node converter.js scp_simulation_scenario.sql
echo.
echo ========================================
echo Conversion Complete!
echo Output: output\scp_simulation_scenario_dm.sql
echo ========================================
pause

223
README.md Normal file

@@ -0,0 +1,223 @@
# PostgreSQL to DaMeng Database SQL Conversion Tool
Automatically converts SQL files exported from PostgreSQL into syntax compatible with DaMeng Database (DM8).
## Features
- ✅ Automatic data type conversion (int8→BIGINT, int4→INT, int2→SMALLINT, etc.)
- ✅ Converts sequences (SEQUENCE) into IDENTITY auto-increment columns
- ✅ Removes PostgreSQL-specific COLLATE clauses
- ✅ Simplifies index syntax (removes USING btree, operator classes, etc.)
- ✅ Smart handling of COALESCE function indexes
- ✅ Generates a detailed conversion log
- ✅ Warns when complex indexes may exceed DaMeng's 816-character limit
## Installation
```bash
cd pg2dm-converter
npm install
```
## Usage
### 1. Convert a single file
```bash
# Basic usage
node converter.js input/your_schema.sql
# Output: output/your_schema_dm.sql
```
### 2. Specify the output file
```bash
node converter.js input/schema.sql output/custom_output.sql
```
### 3. Batch conversion
```bash
# Convert all SQL files under the input directory
node converter.js input/*.sql
```
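### 4. Programmatic use (Node.js)
The converter can also be used from another Node.js script. Below is a minimal sketch based on the `PG2DMConverter` class exported by `converter.js`; the file names are placeholders:
```javascript
const fs = require('fs');
const PG2DMConverter = require('./converter');

// Read a PostgreSQL dump, convert it, and write the DaMeng-compatible result.
const sql = fs.readFileSync('input/schema.sql', 'utf8');
const converter = new PG2DMConverter();
const converted = converter.convert(sql, 'schema.sql');
fs.writeFileSync('output/schema_dm.sql', converted, 'utf8');

// Statistics and warnings are collected on the instance.
console.log(converter.stats);
console.log(converter.warnings);
```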
## Directory Structure
```
pg2dm-converter/
├── converter.js       # Main converter program
├── config.js          # Conversion rule configuration
├── package.json       # npm manifest
├── README.md          # Documentation
├── input/             # Place SQL files to convert here
└── output/            # Converted files are written here
```
## Conversion Rules
### 1. Data Type Mapping
| PostgreSQL | DaMeng (DM8) |
|-----------|----------|
| int8 | BIGINT |
| int4 | INT |
| int2 | SMALLINT |
| numeric | DECIMAL |
| bool | BIT |
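Under the hood, the mapping is applied as a case-insensitive, whole-word regular expression replacement. A simplified sketch of the approach used in `converter.js`:
```javascript
const dataTypeMapping = { int8: 'BIGINT', int4: 'INT', int2: 'SMALLINT', numeric: 'DECIMAL', bool: 'BIT' };

// Replace whole-word PostgreSQL type names, case-insensitively.
function convertDataTypes(sql) {
  return sql.replace(/\b(int8|int4|int2|numeric|bool)\b/gi,
    (match) => dataTypeMapping[match.toLowerCase()] || match);
}

console.log(convertDataTypes('"qty" int8 NOT NULL, "is_active" bool'));
// -> "qty" BIGINT NOT NULL, "is_active" BIT
```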
### 2. Sequence Conversion
**Before (PostgreSQL):**
```sql
"id" int8 NOT NULL DEFAULT nextval('"schema".seq_name'::regclass)
```
**After (DaMeng):**
```sql
"id" BIGINT IDENTITY(1, 1) NOT NULL
```
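In `converter.js` this is done with a regular expression that rewrites the whole column definition (data types have already been mapped by the time this step runs). A simplified sketch:
```javascript
// Rewrite '"id" BIGINT NOT NULL DEFAULT nextval(...)' as an IDENTITY column.
function convertSequenceColumn(sql) {
  return sql.replace(
    /"(\w+)"\s+([A-Z]+(?:\([^)]+\))?)\s+NOT\s+NULL\s+DEFAULT\s+nextval\s*\([^)]+\)/gi,
    '"$1" $2 IDENTITY(1, 1) NOT NULL'
  );
}

console.log(convertSequenceColumn(`"id" BIGINT NOT NULL DEFAULT nextval('"schema".seq_name'::regclass)`));
// -> "id" BIGINT IDENTITY(1, 1) NOT NULL
```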
### 3. Index Syntax Simplification
**Before (PostgreSQL):**
```sql
CREATE INDEX idx_name ON table_name USING btree (
"column1" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST
);
```
**After (DaMeng):**
```sql
CREATE INDEX idx_name ON table_name (
"column1"
);
```
### 4. COALESCE Function Index Handling
If an index contains too many COALESCE functions (more than 4 by default), the tool will:
- Automatically strip the COALESCE wrappers while keeping the original column names
- Emit a warning in the console output
- Record the details in the conversion log
**Example:**
Before:
```sql
CREATE UNIQUE INDEX idx ON table(
COALESCE("col1", '-999'),
COALESCE("col2", '-999'),
...
);
```
After:
```sql
CREATE UNIQUE INDEX idx ON table(
"col1",
"col2",
...
);
```
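The stripping itself is a single regular expression replacement, as in `converter.js` (shown here in isolation):
```javascript
// Reduce COALESCE("col", '<default>') to the bare quoted column name.
function stripCoalesce(columns) {
  return columns.replace(/COALESCE\s*\(\s*"?(\w+)"?\s*,\s*'[^']+'\s*\)/gi, '"$1"');
}

console.log(stripCoalesce(`COALESCE("col1", '-999'), COALESCE("col2", '-999')`));
// -> "col1", "col2"
```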
## Conversion Log
Each conversion produces two files:
1. **SQL file**: `output/filename_dm.sql` - the converted SQL script
2. **Log file**: `output/filename_dm_conversion.log.json` - a detailed conversion log
The log file contains:
```json
{
  "timestamp": "2025-11-15T12:00:00.000Z",
  "stats": {
    "dataTypes": 45,
    "sequences": 12,
    "collates": 128,
    "indexes": 23,
    "coalesceIndexes": 2
  },
  "warnings": [
    "Index idx_xxx contains 8 COALESCE functions and may exceed DaMeng's 816-character limit"
  ],
  "logs": [...]
}
```
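A quick way to surface warnings after a run is to read the log file back in (an illustrative snippet; the file name below is a placeholder following the `*_conversion.log.json` naming convention):
```javascript
const fs = require('fs');

// Load the conversion log and print anything that needs manual review.
const log = JSON.parse(fs.readFileSync('output/schema_dm_conversion.log.json', 'utf8'));
console.log(`Converted ${log.stats.dataTypes} data types and ${log.stats.sequences} sequences`);
log.warnings.forEach((w, i) => console.log(`${i + 1}. ${w}`));
```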
## Configuration
Edit `config.js` to customize the conversion rules:
```javascript
module.exports = {
  // Data type mapping
  dataTypeMapping: {
    'int8': 'BIGINT',
    // ... more mappings
  },
  // Warning threshold for COALESCE function indexes
  coalesceThreshold: 4,
  // DaMeng function index length limit
  functionIndexMaxLength: 816,
  // Output options
  output: {
    addConversionComment: true,
    generateLog: true,
    warningOnComplexIndex: true
  }
};
```
## Notes
1. **Back up originals**: Back up the original PostgreSQL SQL files before converting
2. **Review the output**: After conversion, manually review key table and index definitions
3. **Test first**: Run the converted SQL in a test environment and verify it before applying it to production
4. **Complex indexes**: For indexes flagged with warnings, check manually whether they need optimization
5. **Function indexes**: DaMeng limits function index expressions to 816 characters; watch for warnings in the log
## FAQ
### Q: Can the converted SQL be executed directly on DaMeng?
A: In most cases, yes, but it is recommended to:
- Check the warnings in the conversion log
- Adjust complex function indexes manually where needed
- Adapt any PostgreSQL-specific features by hand
### Q: How should warnings in the conversion log be handled?
A: Warnings typically involve:
- Complex COALESCE indexes: simplified automatically, but confirm the business logic still holds
- Overlong function indexes: split them manually or use virtual columns
### Q: Which PostgreSQL versions are supported?
A: Tested against PostgreSQL 12-16; in principle, any version whose export uses standard SQL is supported
## Example
See `d:\scp_custom_planning_item_dm.sql` for a complete example
## Changelog
- v1.0.0 (2025-11-15)
  - Initial release
  - Basic data type conversion
  - Sequences converted to IDENTITY
  - Index syntax simplification
  - COALESCE function index handling
## License
MIT License
## Support
If you run into problems, check the conversion log file; it records detailed conversion steps and warnings.

53
config.js Normal file

@@ -0,0 +1,53 @@
/**
 * Conversion rule configuration: PostgreSQL to DaMeng
 */
module.exports = {
  // Data type mapping
  dataTypeMapping: {
    'int8': 'BIGINT',
    'int4': 'INT',
    'int2': 'SMALLINT',
    'numeric': 'DECIMAL',
    'varchar': 'VARCHAR',
    'timestamp': 'TIMESTAMP',
    'bool': 'BIT',
    'text': 'TEXT',
    'bytea': 'BLOB'
  },
  // Conversion rule for sequence DEFAULT values
  sequencePatterns: {
    // nextval('schema.seq_name'::regclass) -> IDENTITY(1,1)
    pattern: /DEFAULT\s+nextval\s*\(\s*['"]([^'"]+)['"](::\w+)?\s*\)/gi,
    replacement: 'IDENTITY(1, 1)'
  },
  // PostgreSQL-specific syntax to remove
  removePatterns: [
    // COLLATE clauses
    /COLLATE\s+"[^"]+"/gi,
    // USING clauses
    /USING\s+\w+/gi,
    // Operator classes
    /"pg_catalog"\."[^"]+_ops"/gi,
    // ASC NULLS LAST / DESC NULLS FIRST
    /\s+(ASC|DESC)\s+NULLS\s+(FIRST|LAST)/gi
  ],
  // Index column definitions: keep ASC/DESC but drop the NULLS part
  indexColumnPattern: /"(\w+)"\s+COLLATE\s+"[^"]+"\s+"[^"]+"\s+(ASC|DESC)\s+NULLS\s+(FIRST|LAST)/gi,
  // Warning threshold for COALESCE function indexes
  coalesceThreshold: 4, // warn when an index contains more than 4 COALESCE functions
  // DaMeng function index expression length limit
  functionIndexMaxLength: 816,
  // Output options
  output: {
    addConversionComment: true, // prepend a conversion summary comment
    generateLog: true, // write a conversion log file
    warningOnComplexIndex: true // warn on complex indexes
  }
};

480
converter.js Normal file

@@ -0,0 +1,480 @@
#!/usr/bin/env node
const fs = require('fs');
const path = require('path');
const config = require('./config');
/**
 * PostgreSQL to DaMeng SQL converter
 */
class PG2DMConverter {
  constructor() {
    this.conversionLog = [];
    this.warnings = [];
    this.stats = {
      dataTypes: 0,
      sequences: 0,
      collates: 0,
      indexes: 0,
      coalesceIndexes: 0
    };
  }
  /**
   * Record a conversion log entry
   */
  log(message, type = 'INFO') {
    const timestamp = new Date().toISOString();
    this.conversionLog.push({ timestamp, type, message });
    console.log(`[${type}] ${message}`);
  }
  /**
   * Record a warning
   */
  warn(message) {
    this.warnings.push(message);
    this.log(message, 'WARN');
  }
  /**
   * Convert data types
   */
  convertDataTypes(sql) {
    let converted = sql;
    const typePattern = /\b(int8|int4|int2|numeric|bool)\b/gi;
    converted = converted.replace(typePattern, (match) => {
      const lowerMatch = match.toLowerCase();
      if (config.dataTypeMapping[lowerMatch]) {
        this.stats.dataTypes++;
        return config.dataTypeMapping[lowerMatch];
      }
      return match;
    });
    return converted;
  }
  /**
   * Convert sequences to IDENTITY
   */
  convertSequences(sql) {
    let converted = sql;
    // Step 1: match full column definitions of the form
    //   "id" BIGINT NOT NULL DEFAULT nextval(...)
    // using a permissive regex that accepts any data type
    const fullPattern = /"(\w+)"\s+([A-Z]+(?:\([^)]+\))?)\s+NOT\s+NULL\s+DEFAULT\s+nextval\s*\([^)]+\)/gi;
    converted = converted.replace(fullPattern, (match, colName, dataType) => {
      this.stats.sequences++;
      this.log(`Converted column definition: ${colName} ${dataType} -> IDENTITY(1,1)`);
      return `"${colName}" ${dataType} IDENTITY(1, 1) NOT NULL`;
    });
    // Step 2: for any remaining forms, replace DEFAULT nextval(...) directly
    const defaultPattern = /DEFAULT\s+nextval\s*\([^)]+\)/gi;
    converted = converted.replace(defaultPattern, (match) => {
      this.stats.sequences++;
      this.log(`Removed sequence DEFAULT: ${match.substring(0, 50)}...`);
      return 'IDENTITY(1, 1)';
    });
    return converted;
  }
  /**
   * Remove COLLATE clauses
   */
  removeCollate(sql) {
    let converted = sql;
    // Handle all COLLATE forms:
    //   COLLATE "pg_catalog"."default"
    //   COLLATE "default"
    //   COLLATE pg_catalog."default"
    const collatePattern1 = /COLLATE\s+"pg_catalog"\."[^"]+"/gi;
    const collatePattern2 = /COLLATE\s+"[^"]+"/gi;
    const collatePattern3 = /COLLATE\s+\w+/gi;
    // Count each COLLATE clause once (the patterns above overlap)
    const collateMatches = sql.match(/\bCOLLATE\b/gi);
    const totalMatches = collateMatches ? collateMatches.length : 0;
    if (totalMatches > 0) {
      this.stats.collates += totalMatches;
      this.log(`Removed ${totalMatches} COLLATE clause(s)`);
    }
    // Remove in order: the most specific pattern first, then the simpler ones
    converted = converted.replace(collatePattern1, '');
    converted = converted.replace(collatePattern2, '');
    converted = converted.replace(collatePattern3, '');
    return converted;
  }
  /**
   * Remove PostgreSQL type-cast syntax
   */
  removeTypeCasts(sql) {
    let converted = sql;
    // Strip ::type casts
    const typeCastPattern = /::(character\s+varying|varchar|text|integer|bigint|smallint|numeric|decimal|timestamp|date|time|boolean|regclass)/gi;
    const matches = sql.match(typeCastPattern);
    if (matches) {
      this.log(`Removed ${matches.length} PostgreSQL type cast(s)`);
    }
    converted = converted.replace(typeCastPattern, '');
    return converted;
  }
  /**
   * Remove pg_catalog schema prefixes and convert boolean defaults
   */
  removePgCatalog(sql) {
    let converted = sql;
    // Strip the "pg_catalog". prefix
    const catalogPattern = /"pg_catalog"\./gi;
    const matches = sql.match(catalogPattern);
    if (matches) {
      this.log(`Removed ${matches.length} pg_catalog prefix(es)`);
    }
    converted = converted.replace(catalogPattern, '');
    // Convert PostgreSQL boolean defaults to DaMeng form (before type quotes are removed)
    converted = converted.replace(/\bDEFAULT\s+false\b/gi, 'DEFAULT 0');
    converted = converted.replace(/\bDEFAULT\s+true\b/gi, 'DEFAULT 1');
    return converted;
  }
  /**
   * Remove quotes around data type names
   */
  removeTypeQuotes(sql) {
    let converted = sql;
    // DaMeng does not need type names quoted; handle this in its own step
    // so that column names are not affected.
    // Match types surrounded by whitespace, or followed by a comma/newline.
    converted = converted.replace(/\s"(BIGINT|INT|SMALLINT|TINYINT|DECIMAL|NUMERIC|VARCHAR|CHAR|TEXT|DATE|TIME|TIMESTAMP|BIT|BOOLEAN|BOOL|BLOB|CLOB)"\s/gi, ' $1 ');
    converted = converted.replace(/\s"(BIGINT|INT|SMALLINT|TINYINT|DECIMAL|NUMERIC|VARCHAR|CHAR|TEXT|DATE|TIME|TIMESTAMP|BIT|BOOLEAN|BOOL|BLOB|CLOB)"([,\n\r])/gi, ' $1$2');
    this.log('Removed quotes around data type names');
    return converted;
  }
  /**
   * Simplify index syntax
   */
  simplifyIndexSyntax(sql) {
    let converted = sql;
    // Count the CREATE INDEX statements being simplified
    const indexMatches = sql.match(/CREATE\s+(?:UNIQUE\s+)?INDEX/gi);
    this.stats.indexes += indexMatches ? indexMatches.length : 0;
    // Remove USING btree/hash/gist etc.
    converted = converted.replace(/USING\s+\w+/gi, '');
    // Remove operator classes such as "pg_catalog"."text_ops" or "text_ops"
    // (covers int8_ops, text_ops, varchar_ops, ...)
    converted = converted.replace(/"pg_catalog"\."[^"]+_ops"/gi, '');
    converted = converted.replace(/\s+"[^"]+_ops"/gi, '');
    // Remove NULLS LAST/FIRST (before any ASC/DESC handling)
    converted = converted.replace(/\s+NULLS\s+(FIRST|LAST)/gi, '');
    // ASC/DESC is kept; uncomment the next line to remove it as well
    // converted = converted.replace(/\s+(ASC|DESC)/gi, '');
    return converted;
  }
  /**
   * Remove duplicate columns within an index definition
   */
  removeDuplicateIndexColumns(sql) {
    let converted = sql;
    // Match CREATE INDEX statements
    const indexPattern = /(CREATE\s+(?:UNIQUE\s+)?INDEX\s+"[^"]+"\s+ON\s+"[^"]+"\."[^"]+"\s*\()([\s\S]*?)(\);)/gi;
    converted = converted.replace(indexPattern, (match, prefix, columns, suffix) => {
      // Parse the column definitions
      const columnList = columns.split(',').map(col => col.trim());
      const seen = new Set();
      const uniqueColumns = [];
      columnList.forEach(col => {
        // Extract the column name (ignoring ASC/DESC etc.)
        const colNameMatch = col.match(/"(\w+)"/);
        if (colNameMatch) {
          const colName = colNameMatch[1].toLowerCase();
          if (!seen.has(colName)) {
            seen.add(colName);
            uniqueColumns.push(col);
          } else {
            this.warn(`Duplicate column in index: ${colNameMatch[1]}; duplicate removed automatically`);
          }
        } else {
          // Keep expressions such as COALESCE(...) as-is
          uniqueColumns.push(col);
        }
      });
      return prefix + '\n ' + uniqueColumns.join(',\n ') + '\n' + suffix;
    });
    return converted;
  }
  /**
   * Process COALESCE function indexes
   */
  processCoalesceIndexes(sql) {
    let converted = sql;
    // Step 1: strip PostgreSQL ::type casts
    converted = converted.replace(/::(character\s+varying|varchar|text|integer|bigint|smallint)/gi, '');
    // Step 2: handle COALESCE function indexes
    const coalesceIndexPattern = /CREATE\s+(?:UNIQUE\s+)?INDEX\s+"([^"]+)"\s+ON\s+"[^"]+"\."[^"]+"\s*\(([\s\S]*?)\);/gi;
    converted = converted.replace(coalesceIndexPattern, (match, indexName, columns) => {
      const coalesceCount = (columns.match(/COALESCE\s*\(/gi) || []).length;
      if (coalesceCount > 0) {
        this.stats.coalesceIndexes++;
        if (coalesceCount > config.coalesceThreshold) {
          this.warn(
            `Index ${indexName} contains ${coalesceCount} COALESCE functions and may exceed DaMeng's 816-character limit; simplified automatically`
          );
        } else {
          this.log(`Processed ${coalesceCount} COALESCE function(s) in index ${indexName}`);
        }
        // Strip COALESCE and keep the original column name.
        // Handles both forms:
        //   COALESCE("col_name", '-999')
        //   COALESCE(col_name, '-999')
        let simplifiedColumns = columns.replace(
          /COALESCE\s*\(\s*"?(\w+)"?\s*,\s*'[^']+'\s*\)/gi,
          '"$1"'
        );
        // Collapse extra whitespace and line breaks
        simplifiedColumns = simplifiedColumns.replace(/\s+/g, ' ').trim();
        return match.replace(columns, simplifiedColumns);
      }
      return match;
    });
    return converted;
  }
  /**
   * Prepend a conversion summary comment
   */
  addConversionHeader(sql, originalFile) {
    const header = `/*
Converted to DaMeng SQL by pg2dm-converter
Source File : ${path.basename(originalFile)}
Source Server Type : PostgreSQL
Target Server Type : DaMeng 8
Conversion Date : ${new Date().toLocaleString('zh-CN')}
Conversion Summary:
- Data Types Converted: ${this.stats.dataTypes}
- Sequences -> IDENTITY: ${this.stats.sequences}
- COLLATE Clauses Removed: ${this.stats.collates}
- Indexes Simplified: ${this.stats.indexes}
- COALESCE Indexes Processed: ${this.stats.coalesceIndexes}
*/
`;
    return header + sql;
  }
  /**
   * Main conversion entry point
   */
  convert(sql, originalFile = 'input.sql') {
    this.log('Starting PostgreSQL to DaMeng SQL conversion');
    let converted = sql;
    // 1. Remove pg_catalog schema prefixes (must run first)
    this.log('Step 1: removing pg_catalog schema prefixes...');
    converted = this.removePgCatalog(converted);
    // 2. Convert data types
    this.log('Step 2: converting data types...');
    converted = this.convertDataTypes(converted);
    // 3. Convert sequences to IDENTITY
    this.log('Step 3: converting sequences to IDENTITY...');
    converted = this.convertSequences(converted);
    // 4. Remove PostgreSQL type casts
    this.log('Step 4: removing PostgreSQL type casts...');
    converted = this.removeTypeCasts(converted);
    // 5. Remove COLLATE clauses
    this.log('Step 5: removing COLLATE clauses...');
    converted = this.removeCollate(converted);
    // 6. Remove quotes around data type names
    this.log('Step 6: removing quotes around data type names...');
    converted = this.removeTypeQuotes(converted);
    // 7. Simplify index syntax
    this.log('Step 7: simplifying index syntax...');
    converted = this.simplifyIndexSyntax(converted);
    // 8. Remove duplicate index columns
    this.log('Step 8: removing duplicate index columns...');
    converted = this.removeDuplicateIndexColumns(converted);
    // 9. Process COALESCE function indexes
    this.log('Step 9: processing COALESCE function indexes...');
    converted = this.processCoalesceIndexes(converted);
    // 10. Prepend the conversion summary comment
    if (config.output.addConversionComment) {
      converted = this.addConversionHeader(converted, originalFile);
    }
    this.log('Conversion complete!');
    return converted;
  }
  /**
   * Write the conversion log file
   */
  generateLogFile(outputPath) {
    const logContent = {
      timestamp: new Date().toISOString(),
      stats: this.stats,
      warnings: this.warnings,
      logs: this.conversionLog
    };
    const logFile = outputPath.replace('.sql', '_conversion.log.json');
    fs.writeFileSync(logFile, JSON.stringify(logContent, null, 2));
    this.log(`Conversion log saved: ${logFile}`);
  }
}
/**
 * Ensure a directory exists
 */
function ensureDir(dirPath) {
  if (!fs.existsSync(dirPath)) {
    fs.mkdirSync(dirPath, { recursive: true });
  }
}
/**
 * Main entry point
 */
function main() {
  const args = process.argv.slice(2);
  if (args.length === 0) {
    console.log(`
PostgreSQL to DaMeng SQL Converter
======================================
Usage:
  node converter.js <input-file.sql> [output-file.sql]
  node converter.js input/*.sql
Examples:
  node converter.js input/schema.sql
  node converter.js input/schema.sql output/schema_dm.sql
  node converter.js input/*.sql
Notes:
  - If no output file is given, a *_dm.sql file is generated in the output directory automatically
  - Wildcards are supported for batch-processing multiple files
  - A conversion log file *_conversion.log.json is generated automatically
`);
    process.exit(0);
  }
  // Make sure the input and output directories exist
  ensureDir('./input');
  ensureDir('./output');
  const inputFile = args[0];
  // Check that the input file exists
  if (!fs.existsSync(inputFile)) {
    console.error(`Error: file not found: ${inputFile}`);
    process.exit(1);
  }
  // Read the input file
  console.log(`\nReading file: ${inputFile}`);
  const sqlContent = fs.readFileSync(inputFile, 'utf8');
  // Convert
  const converter = new PG2DMConverter();
  const convertedSql = converter.convert(sqlContent, inputFile);
  // Determine the output file path
  const outputFile = args[1] || path.join(
    './output',
    path.basename(inputFile, '.sql') + '_dm.sql'
  );
  // Write the output file
  ensureDir(path.dirname(outputFile));
  fs.writeFileSync(outputFile, convertedSql, 'utf8');
  console.log(`\n✓ Conversion complete, output file: ${outputFile}`);
  // Generate the log
  if (config.output.generateLog) {
    converter.generateLogFile(outputFile);
  }
  // Show warnings
  if (converter.warnings.length > 0) {
    console.log('\n⚠ Warnings:');
    converter.warnings.forEach((warn, i) => {
      console.log(` ${i + 1}. ${warn}`);
    });
  }
  console.log('\nConversion statistics:');
  console.log(` - Data types converted: ${converter.stats.dataTypes}`);
  console.log(` - Sequences -> IDENTITY: ${converter.stats.sequences}`);
  console.log(` - COLLATE clauses removed: ${converter.stats.collates}`);
  console.log(` - Indexes simplified: ${converter.stats.indexes}`);
  console.log(` - COALESCE indexes processed: ${converter.stats.coalesceIndexes}`);
}
// Run when invoked directly
if (require.main === module) {
  main();
}
module.exports = PG2DMConverter;

28112
input/scp.sql Normal file

File diff suppressed because it is too large

86
package-lock.json generated Normal file

@@ -0,0 +1,86 @@
{
"name": "pg2dm-converter",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "pg2dm-converter",
"version": "1.0.0",
"license": "MIT",
"dependencies": {
"chalk": "^4.1.2"
}
},
"node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"license": "MIT",
"dependencies": {
"color-convert": "^2.0.1"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmmirror.com/chalk/-/chalk-4.1.2.tgz",
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.1.0",
"supports-color": "^7.1.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmmirror.com/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"license": "MIT",
"dependencies": {
"color-name": "~1.1.4"
},
"engines": {
"node": ">=7.0.0"
}
},
"node_modules/color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmmirror.com/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"license": "MIT"
},
"node_modules/has-flag": {
"version": "4.0.0",
"resolved": "https://registry.npmmirror.com/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/supports-color": {
"version": "7.2.0",
"resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-7.2.0.tgz",
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
"license": "MIT",
"dependencies": {
"has-flag": "^4.0.0"
},
"engines": {
"node": ">=8"
}
}
}
}

22
package.json Normal file

@@ -0,0 +1,22 @@
{
"name": "pg2dm-converter",
"version": "1.0.0",
"description": "PostgreSQL to DaMeng Database SQL Converter",
"main": "converter.js",
"scripts": {
"start": "node converter.js",
"convert": "node converter.js"
},
"keywords": [
"postgresql",
"dameng",
"dm8",
"sql",
"converter"
],
"author": "",
"license": "MIT",
"dependencies": {
"chalk": "^4.1.2"
}
}

1026
scp_simulation_scenario.sql Normal file

File diff suppressed because it is too large