
Commit 947ad94

Mitchell Shiell authored and committed

prelude PhaseOne rework

1 parent: 15b7018

94 files changed: +2661, −3410 lines

.gitignore (−1)

```diff
@@ -4,6 +4,5 @@ node_modules
 __MACOSX
 
 volumes
-dist
 node_modules
 
```
.prettierrc (+9)

```diff
@@ -19,6 +19,15 @@
       "proseWrap": "always",
       "singleQuote": false
     }
+  },
+  {
+    "files": "*.yml",
+    "options": {
+      "tabWidth": 2,
+      "printWidth": 100,
+      "singleQuote": true,
+      "bracketSpacing": true
+    }
   }
 ]
}
```
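
The new override gives YAML files their own formatting rules while leaving the existing Markdown settings untouched. As a quick sanity check — a sketch assuming Prettier is available in the repo's devDependencies and run via npx — you can ask Prettier which files would change under the new rules:

```sh
# Report YAML files that don't match the new override (exits non-zero if any)
npx prettier --check "**/*.yml"

# Print a formatted preview of one compose file without modifying it
npx prettier docker-compose.phaseOne.yml
```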

Makefile (+54, −27)

```diff
@@ -1,34 +1,61 @@
-.PHONY: phaseOne phaseTwo down clean
+# Define all phony targets (targets that don't create files)
+.PHONY: phaseOne stageDev down downVolumes clean mockData
 
+# Start Phase One development environment
 phaseOne:
-	docker compose -f ./phaseOne/docker-compose.phaseOne.yml up --attach conductor
+	PROFILE=phaseOne docker compose -f ./docker-compose.phaseOne.yml --profile phaseOne up --attach conductor
 
-phaseTwo:
-	docker compose -f ./phaseTwo/docker-compose.phaseTwo.yml up --attach conductor
+# Spin up complementary services for local Stage development
+stageDev:
+	PROFILE=stageDev docker compose -f ./docker-compose.phaseOne.yml --profile stageDev up --attach conductor
+
+# Gracefully shut down all containers while preserving volumes
+down:
+	@{ \
+	printf "\033[1;36mConductor:\033[0m Checking for containers...\n"; \
+	if docker compose -f ./docker-compose.phaseOne.yml ps -a -q 2>/dev/null | grep -q .; then \
+		printf "\033[1;36mConductor:\033[0m Removing Phase One containers...\n"; \
+		PROFILE=phaseOne docker compose -f ./docker-compose.phaseOne.yml --profile phaseOne down; \
+	fi; \
+	if docker compose -f ./docker-compose.phaseTwo.yml ps -a -q 2>/dev/null | grep -q .; then \
+		printf "\033[1;36mConductor:\033[0m Removing Phase Two containers...\n"; \
+		PROFILE=phaseTwo docker compose -f ./docker-compose.phaseTwo.yml --profile phaseTwo down; \
+	fi; \
+	printf "\033[1;32mSuccess:\033[0m Cleanup completed\n"; \
+	}
 
-platform:
-	docker compose -f /docker-compose.yml up --attach conductor
+# Shut down all containers and remove all volumes (WARNING: Deletes all data)
+downVolumes:
+	@{ \
+	printf "\033[1;33mWarning:\033[0m This will remove all containers AND their volumes. Data will be lost.\n"; \
+	read -p "Are you sure you want to continue? [y/N] " confirm; \
+	if [ "$$confirm" = "y" ] || [ "$$confirm" = "Y" ]; then \
+		printf "\033[1;36mConductor:\033[0m Checking for containers...\n"; \
+		if docker compose -f ./docker-compose.phaseOne.yml ps -a -q 2>/dev/null | grep -q .; then \
+			printf "\033[1;36mConductor:\033[0m Removing Phase One containers and volumes...\n"; \
+			PROFILE=phaseOne docker compose -f ./docker-compose.phaseOne.yml --profile phaseOne down -v; \
+		fi; \
+		if docker compose -f ./docker-compose.phaseTwo.yml ps -a -q 2>/dev/null | grep -q .; then \
+			printf "\033[1;36mConductor:\033[0m Removing Phase Two containers and volumes...\n"; \
+			PROFILE=phaseTwo docker compose -f ./docker-compose.phaseTwo.yml --profile phaseTwo down -v; \
+		fi; \
+		printf "\033[1;32mSuccess:\033[0m Cleanup completed\n"; \
+	else \
+		printf "\033[1;36mOperation cancelled\033[0m\n"; \
+	fi; \
+	}
 
-down:
-	@if [ -n "$$(docker compose -f ./phaseOne/docker-compose.phaseOne.yml ps -q)" ]; then \
-		echo "Stopping Phase One..."; \
-		docker compose -f ./phaseOne/docker-compose.phaseOne.yml down -v; \
-	elif [ -n "$$(docker compose -f ./phaseTwo/docker-compose.phaseTwo.yml ps -q)" ]; then \
-		echo "Stopping Phase Two..."; \
-		docker compose -f ./phaseTwo/docker-compose.phaseTwo.yml down -v; \
-	else \
-		echo "No running services found"; \
-	fi
+# Load sample data into Elasticsearch
+mockData:
+	PROFILE=mockData docker compose -f ./docker-compose.phaseOne.yml --profile mockData up --attach conductor
 
+# Remove all documents from Elasticsearch (preserves index structure)
 clean:
-	@echo "\033[31mWARNING: This will remove all data within Elasticsearch.\033[0m"
-	@/bin/bash -c 'echo "Are you sure you want to proceed? [y/N] " && read ans && [ $${ans:-N} = y ] && (\
-		echo "Stopping related containers..." && \
-		docker compose -f phaseOne/docker-compose.phaseOne.yml down || true && \
-		echo "Cleaning up Elasticsearch volumes..." && \
-		rm -rf ./phaseOne/volumes/es-data/nodes 2>/dev/null || true && \
-		find ./phaseOne/volumes/es-logs/ -type f ! -name "logs.txt" -delete 2>/dev/null || true && \
-		docker volume rm -f conductor_elasticsearch-data 2>/dev/null || true && \
-		docker volume rm -f conductor_elasticsearch-logs 2>/dev/null || true && \
-		echo "Cleanup completed!" \
-	)'
+	@echo "\033[1;33mWarning:\033[0m This will delete ALL data from the Elasticsearch index."
+	@echo "This action cannot be undone."
+	@/bin/bash -c 'read -p "Are you sure you want to continue? [y/N] " confirm; \
+	if [ "$$confirm" = "y" ] || [ "$$confirm" = "Y" ]; then \
+		PROFILE=clean docker compose -f ./docker-compose.phaseOne.yml --profile clean up --attach conductor; \
+	else \
+		echo "\033[1;36mOperation cancelled\033[0m"; \
+	fi'
```
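
All of the reworked targets drive a single compose file and select services with Compose profiles, passing the same name through both the `PROFILE` environment variable and the `--profile` flag. A sketch of what this looks like outside of make — the service layout of `docker-compose.phaseOne.yml` is assumed, since the file itself is not shown in this diff:

```sh
# List the services the phaseOne profile would start
PROFILE=phaseOne docker compose -f ./docker-compose.phaseOne.yml \
  --profile phaseOne config --services

# Start Phase One manually; equivalent to `make phaseOne`
PROFILE=phaseOne docker compose -f ./docker-compose.phaseOne.yml \
  --profile phaseOne up --attach conductor
```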

README.md (+69, −47)

````diff
@@ -1,77 +1,99 @@
-# Prelude Early Release
+# Prelude Pre-release
 
-Prelude is a tool that enables teams to incrementally build their data platform. By breaking down data portal development into phased steps, teams can systematically verify requirements and user workflows while minimizing technical overhead.
-
-Development progresses through four distinct phases, each building upon the previous phase's foundation while introducing new capabilities.
+Prelude is a tool that enables teams to incrementally build their data platform.
+By breaking down data portal development into phased steps, teams can
+systematically verify requirements and user workflows while minimizing technical
+overhead.
 
 This process enables teams to:
 
-* Validate project requirements with hands-on testing
-* Gain a clear understanding of user workflows and interactions
-* Documented data management processes
-* Define security and access control needs
-* Build a solid foundation for production deployment planning
+- Validate project requirements with hands-on testing
+- Gain a clear understanding of user workflows and interactions
+- Document data management processes
+- Define security and access control needs
+- Build a solid foundation for production deployment planning
+
+## Development Phases
+
+Development progresses through four distinct phases, each building upon the
+previous phase's foundation while introducing new capabilities.
 
-## Prelude Development Phases
+| Phase | Description | Software Components | Status |
+| ----- | ----------- | ------------------- | ------ |
+| **PhaseOne:** Data Exploration & Theming | Display your tabular data in a themable portal with our front-end and back-end search components. | CSV-processor, Elasticsearch, Arranger, Stage | 🟢 Working |
+| **PhaseTwo:** Tabular Data Management & Validation | Implementation of tabular data submission, storage and validation. | All the above with Lyric, LyricDb (Postgres), Lectern and LecternDb (MongoDb) added | 🟡 Pending |
+| **PhaseThree:** File Data & Metadata Management | Implement back-end file management. | All the above with Song, Score, SongDb (Postgres) and Object Storage (Minio) | 🟡 Pending |
+| **PhaseFour:** Identity and Access Management | Configure Keycloak to authenticate users and authorize what they have access to. | Emphasis on data access control planning and Keycloak configuration | ⚪ Not Started |
 
-| Phase | Description | Software Components |
-|-------|-------------|----------------|
-| **PhaseOne:** Data Exploration & Theming | Display your tabular data in a themable portal with our front-end and back-end search components. | CSV-processor, Elasticsearch, Arranger, Stage |
-| **PhaseTwo:** Tabular Data Management & Validation | Implementation of tabular data submission, storage and validation. | All the above with Lyric, LyricDb (Postgres), Lectern and LecternDb (MongoDb) added |
-| **PhaseThree:** File Data & Metadata Management | Implement back-end file management. | All the above with Song, Score, SongDb (Postgres) and Object Storage (Minio) |
-| **PhaseFour:** Identity and Access management | Configure Keycloak to authenticate users and authorize what they have access too. | Empahsis on data access control planning and Keycloak configuration |
+## Prerequisites
 
-## Running the portal
+### Required Software
 
-1. **Set Up Docker:** Install or update to Docker Desktop version 4.32.0 or higher. Visit [Docker's website](https://www.docker.com/products/docker-desktop/) for installation details.
+- Node.js 18 or higher
+- npm 9 or higher
+- Docker Desktop 4.32.0 or higher
+  ([Download here](https://www.docker.com/products/docker-desktop/))
 
-> [!important]
-> Allocate sufficient resources to Docker:
-> - Minimum CPU: `8 cores`
-> - Memory: `8 GB`
-> - Swap: `2 GB`
-> - Virtual disk: `64 GB`
+### Docker Resource Requirements
+
+> [!important] Allocate sufficient resources to Docker:
+>
+> - Minimum CPU: `8 cores`
+> - Memory: `8 GB`
+> - Swap: `2 GB`
+> - Virtual disk: `64 GB`
 >
 > Adjust these in Docker Desktop settings under "Resources".
 
-**2. Clone the repo branch**
+## Installation & Setup
+
+1. **Clone the repo branch**
 
 ```
 git clone -b prelude https://github.com/overture-stack/conductor.git
 ```
 
-**3. Build a Stage image using the dockerfile**
-
-For phaseOne run:
+2. **Build the Stage image using the dockerfile.** For phaseOne run:
 
 ```
-cd phaseOne/stageP1
+cd apps/stage
 docker build -t localstageimage:1.0 .
 ```
 
-For phaseTwo run:
+After editing your stage folder make sure you run the above build command before
+deploying locally using this docker compose setup.
 
-```
-cd phaseTwo/stageP2
-docker build -t localstageimage:2.0 .
-```
+## Running the Platform
+
+### Deployment Commands
+
+Run one of the following commands from the root of the repository:
 
-After editing your stage folder make sure you run the above build command before deploying locally using this docker compose setup.
+| Environment | Unix/macOS | Windows |
+| ----------- | ---------- | ------- |
+| phaseOne Platform | `make phaseOne` | pending |
+| phaseTwo Platform | pending | pending |
+| phaseThree Platform | pending | pending |
 
-**4. Run one of the following commands from the root of the repository:**
+Following startup the front-end portal will be available at
+`localhost:3000`
 
-| Environment | Unix/macOS | Windows |
-|-------------|------------|---------|
-| phaseOne Platform | `make phaseOne` | pending |
-| phaseTwo Platform | `make phaseTwo` | pending |
+### Helper Commands
 
-Following startup the front end portal will be available at your `localhost:3000`
+| Description | Unix/macOS | Windows |
+| ----------- | ---------- | ------- |
+| Shuts down all containers | `make down` | pending |
+| Shuts down all containers and removes volumes | `make downVolumes` | pending |
+| Submits pre-configured demo data | `make mockData` | pending |
+| Removes all documents from Elasticsearch | `make clean` | pending |
+| Spins up all complementary services for local Stage development | `make stageDev` | pending |
 
-**You can also run any of the following helper commands:**
+## Documentation
 
-| Description | Unix/macOS | Windows |
-|-------------|------------|---------|
-| Shuts down all containers | `make down` | pending |
-| Shuts down all containers & removes all persistent Elasticsearch volumes (only relevant for phaseOne) | `make clean` | pending |
+Detailed documentation can be found in multiple locations:
 
-Information on usage can be found from the `/docs` folder at the root of this repo or from the documentation tab found on our front-end at `http://localhost:3000/documentation`
+- The `/docs` folder at the root of this repository
+- README files within each root directory containing information on the folder's
+  purpose and usage
+- Frontend documentation available after deployment at
+  `http://localhost:3000/documentation`
````
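
Taken together, the README's setup and deployment steps reduce to a short shell session. A minimal sketch, assuming Docker Desktop is already running with the resources listed under Prerequisites:

```sh
# Clone the prelude branch and enter the repo
git clone -b prelude https://github.com/overture-stack/conductor.git
cd conductor

# Build the local Stage image the compose setup expects
cd apps/stage
docker build -t localstageimage:1.0 .
cd ../..

# Bring up the PhaseOne stack, then browse to http://localhost:3000
make phaseOne
```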

apps/csv-processor/dist/main.js (+93, new file)

```js
#!/usr/bin/env node
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const cli_1 = require("./utils/cli");
const elasticsearch_1 = require("./utils/elasticsearch");
const processor_1 = require("./services/processor");
const validations = __importStar(require("./services/validations"));
const chalk_1 = __importDefault(require("chalk"));
async function main() {
    try {
        // Display a simple start log with chalk
        console.log(chalk_1.default.blue('\n============================================='));
        console.log(chalk_1.default.bold.blue(' CSV Processor Starting... 🚀'));
        console.log(chalk_1.default.blue('=============================================\n'));
        // Setup configuration from CLI arguments
        const { config, filePath } = (0, cli_1.setupCLI)();
        if (!filePath) {
            console.error('Error: No input file specified');
            process.exit(1);
        }
        // Validate file existence and readability
        const fileValid = await validations.validateFile(filePath);
        if (!fileValid) {
            process.exit(1);
        }
        // Validate batch size
        const batchSizeValid = validations.validateBatchSize(config.batchSize);
        if (!batchSizeValid) {
            process.exit(1);
        }
        // Validate delimiter
        const delimiterValid = validations.validateDelimiter(config.delimiter);
        if (!delimiterValid) {
            process.exit(1);
        }
        // Initialize Elasticsearch client
        const client = (0, elasticsearch_1.createClient)(config);
        // Validate Elasticsearch connection
        const connectionValid = await validations.validateElasticsearchConnection(client, config);
        if (!connectionValid) {
            process.exit(1);
        }
        // Validate Elasticsearch index
        const indexValid = await validations.validateIndex(client, config.elasticsearch.index);
        if (!indexValid) {
            process.exit(1);
        }
        // Process the CSV file
        await (0, processor_1.processCSVFile)(filePath, config, client);
    }
    catch (error) {
        console.error(chalk_1.default.red('Error during processing:'), error);
        process.exit(1);
    }
}
// Start processing
main();
```
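
The compiled entry point above validates the input file, batch size, delimiter, and the Elasticsearch connection and index before handing off to the CSV processor. A hedged usage sketch — the actual flag names are defined by `setupCLI` in `./utils/cli`, which is not part of this diff, so the `--file` flag below is illustrative only:

```sh
# Run the compiled processor against a CSV file (flag name is hypothetical;
# see the cli utility for the real argument parsing)
node apps/csv-processor/dist/main.js --file ./data/sample.csv

# Confirm documents were indexed using Elasticsearch's standard _count API
# (replace <index-name> with the index the processor was configured to use)
curl -s "http://localhost:9200/<index-name>/_count?pretty"
```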

apps/csv-processor/dist/readme.md

Whitespace-only changes.
