From 2ba01995d4be33c49e15cb17604fab0838c203de Mon Sep 17 00:00:00 2001 From: Subhasree-Das Date: Thu, 12 Jun 2025 00:57:33 +0530 Subject: [PATCH 01/19] Add the cos2cos sample for CodeEngine Signed-off-by: Shailza Thakur Signed-off-by: Hamza Bharmal Signed-off-by: Subhasree Das Signed-off-by: Subhasree-Das Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/.ceignore | 13 + code-engine-cos2cos/.dockerignore | 13 + code-engine-cos2cos/.gitignore | 10 + code-engine-cos2cos/Dockerfile | 25 ++ code-engine-cos2cos/README.md | 199 ++++++++++ .../bucketOperations/bucketOperations_test.go | 226 +++++++++++ .../bucketOperations/bucketOps.go | 282 ++++++++++++++ .../bucketOperations/objectOps.go | 175 +++++++++ .../bucketOperations/objectOps_test.go | 325 ++++++++++++++++ code-engine-cos2cos/build.sh | 10 + code-engine-cos2cos/config.sh | 287 ++++++++++++++ code-engine-cos2cos/data.sh | 57 +++ code-engine-cos2cos/env_sample | 33 ++ code-engine-cos2cos/go.mod | 49 +++ code-engine-cos2cos/go.sum | 164 ++++++++ code-engine-cos2cos/main.go | 110 ++++++ code-engine-cos2cos/main_test.go | 13 + .../metadata/local-timestamp.go | 49 +++ code-engine-cos2cos/metadata/timestamp.go | 102 +++++ .../metadata/timestamp_test.go | 218 +++++++++++ code-engine-cos2cos/mock/mockS3Client.go | 72 ++++ code-engine-cos2cos/process/process.go | 229 +++++++++++ code-engine-cos2cos/process/process_test.go | 365 ++++++++++++++++++ code-engine-cos2cos/run.sh | 93 +++++ code-engine-cos2cos/testFiles/generator.go | 37 ++ .../userDefinedProcess/processObject.go | 18 + code-engine-cos2cos/utils/convert.go | 29 ++ code-engine-cos2cos/utils/convert_test.go | 73 ++++ 28 files changed, 3276 insertions(+) create mode 100644 code-engine-cos2cos/.ceignore create mode 100644 code-engine-cos2cos/.dockerignore create mode 100644 code-engine-cos2cos/.gitignore create mode 100644 code-engine-cos2cos/Dockerfile create mode 100644 code-engine-cos2cos/README.md create mode 100644 code-engine-cos2cos/bucketOperations/bucketOperations_test.go create mode 100644 code-engine-cos2cos/bucketOperations/bucketOps.go create mode 100644 code-engine-cos2cos/bucketOperations/objectOps.go create mode 100644 code-engine-cos2cos/bucketOperations/objectOps_test.go create mode 100755 code-engine-cos2cos/build.sh create mode 100755 code-engine-cos2cos/config.sh create mode 100644 code-engine-cos2cos/data.sh create mode 100644 code-engine-cos2cos/env_sample create mode 100644 code-engine-cos2cos/go.mod create mode 100644 code-engine-cos2cos/go.sum create mode 100644 code-engine-cos2cos/main.go create mode 100644 code-engine-cos2cos/main_test.go create mode 100644 code-engine-cos2cos/metadata/local-timestamp.go create mode 100644 code-engine-cos2cos/metadata/timestamp.go create mode 100644 code-engine-cos2cos/metadata/timestamp_test.go create mode 100644 code-engine-cos2cos/mock/mockS3Client.go create mode 100644 code-engine-cos2cos/process/process.go create mode 100644 code-engine-cos2cos/process/process_test.go create mode 100755 code-engine-cos2cos/run.sh create mode 100644 code-engine-cos2cos/testFiles/generator.go create mode 100644 code-engine-cos2cos/userDefinedProcess/processObject.go create mode 100644 code-engine-cos2cos/utils/convert.go create mode 100644 code-engine-cos2cos/utils/convert_test.go diff --git a/code-engine-cos2cos/.ceignore b/code-engine-cos2cos/.ceignore new file mode 100644 index 00000000..2fb91d21 --- /dev/null +++ b/code-engine-cos2cos/.ceignore @@ -0,0 +1,13 @@ +.env +.DS_Store +.gitignore +.git/ +*.md +testFiles/ +Unused/ +cos2cos 
+cronjob.yaml +data +*.sh +input-data +output-data \ No newline at end of file diff --git a/code-engine-cos2cos/.dockerignore b/code-engine-cos2cos/.dockerignore new file mode 100644 index 00000000..2fb91d21 --- /dev/null +++ b/code-engine-cos2cos/.dockerignore @@ -0,0 +1,13 @@ +.env +.DS_Store +.gitignore +.git/ +*.md +testFiles/ +Unused/ +cos2cos +cronjob.yaml +data +*.sh +input-data +output-data \ No newline at end of file diff --git a/code-engine-cos2cos/.gitignore b/code-engine-cos2cos/.gitignore new file mode 100644 index 00000000..0dd2a12e --- /dev/null +++ b/code-engine-cos2cos/.gitignore @@ -0,0 +1,10 @@ +.env +.DS_Store +temp/last_modified_time.json +testFiles/*.txt +cronjob.yaml +cos2cos +downloaded_file.* +temp.sh +input-data +output-data \ No newline at end of file diff --git a/code-engine-cos2cos/Dockerfile b/code-engine-cos2cos/Dockerfile new file mode 100644 index 00000000..dac007f1 --- /dev/null +++ b/code-engine-cos2cos/Dockerfile @@ -0,0 +1,25 @@ +FROM golang:1.23-alpine as builder + +WORKDIR /app + +COPY go.mod go.sum ./ + +RUN go mod tidy + +COPY . . + +# Build the Go application for Linux, stripped of debug information +RUN CGO_ENABLED=0 GOOS=linux go build -o cos2cos + +# Stage 2: Final stage (minimal image) +FROM alpine + +RUN apk --no-cache add ca-certificates + + +# Copy the binary from the builder stage +COPY --from=builder /app/cos2cos / + +RUN chmod +x /cos2cos + +ENTRYPOINT ["/cos2cos"] \ No newline at end of file diff --git a/code-engine-cos2cos/README.md b/code-engine-cos2cos/README.md new file mode 100644 index 00000000..bc3eb4e5 --- /dev/null +++ b/code-engine-cos2cos/README.md @@ -0,0 +1,199 @@ +# COS to COS Processing System + +This repository contains a Go-based application that processes objects (files) in IBM Cloud Object Storage (COS) between a primary and secondary bucket. The system is designed for flexibility, scalability, and ease of use, allowing users to configure the processing logic based on their needs. The program leverages Go routines and IBM Code Engine jobs to implement parallel processing for better performance. + +## Overview + +The **COS to COS Processing System** performs the following tasks: + +1. **Fetch Objects**: It fetches updated objects from a primary bucket. +2. **Process Objects**: It performs a user-defined processing function (e.g., converting lowercase text to uppercase). +3. **Update Secondary Bucket**: It uploads the processed objects to the secondary bucket. +4. **Optimized Processing**: Only objects modified or created after the last processing cycle are processed. +5. **Parallel Processing**: It uses Go-routines and IBM Code Engine Jobs to process multiple files concurrently. + +The program uses IBM Cloud’s SDK to access COS and provides options for authentication using either trusted profiles. + +## Features + +- **User-Defined Processing**: The processing logic can be customized. By default, it converts all lowercase text in a file to uppercase, but this can be modified based on user requirements. +- **Efficient File Handling**: It only processes files that have been modified or created since the last run. +- **Parallel Processing**: Utilizes Go-routines and IBM Code Engine Jobs to process large files concurrently. Jobs are distributed based on a defined `array_size`. +- **Configurable**: Users can configure the system via environment variables for bucket names, region, and other settings. +- **Testing**: Unit tests are provided using Ginkgo and the Testify mock package. 
+- **Deployment on IBM Cloud**: The application is containerized and can be deployed using IBM Code Engine. + +## Getting Started + +### Prerequisites + +Before you begin, ensure you have the following: + +- **IBM Cloud CLI**: For managing IBM services. Refer [Getting started with the IBM Cloud CLI](https://cloud.ibm.com/docs/cli?topic=cli-getting-started) +- **IBM Plugins**(Refer [Step 3](#setup--configuration)): Verify that below plugins are installed: + 1. ce + 2. cos + 3. iam +- **Podman** or **Docker**(Refer, [Build Steps](#build-steps)): For building the container. +- **IBM Cloud Object Storage (COS)**(Not required when using config.sh script): Ensure you have created two buckets (primary and secondary). Refer, [Getting started with IBM Cloud Object Storage](https://cloud.ibm.com/docs/cloud-object-storage?topic=cloud-object-storage-getting-started-cloud-object-storage). +- **IBM Code Engine**(Not required when using config.sh script): Set up and create a project on IBM Code Engine. Refer, [Getting started with IBM Cloud Code Engine](https://cloud.ibm.com/docs/codeengine?topic=codeengine-getting-started). +- **Go**: For modifying and testing the source code. + +### Setup & Configuration +1. **Clone the Repository**: + ```bash + git clone https://github.ibm.com/ibmcloud-codeengine-internship/code-engine-cos2cos + cd code-engine-cos2cos + ``` +2. **Modify the Processing Function (Optional)**: + + If you want to modify the processing logic, update the `UserDefinedProcessObjectBytes` function in `userDefinedProcess/processObject.go` with your desired functionality. +3. **Installing necessary plugins required for running the build image** + + To learn more: [Extending IBM Cloud CLI with plug-ins](https://cloud.ibm.com/docs/cli?topic=cli-plug-ins). + ```bash + ibmcloud plugin install ce cos iam + ``` +4. **Update [data.sh](/data.sh)**: + + Please update the variable values in the file as per your requirement. The details for creating the cos2cos job with config.sh and building image with build.sh is fetched from this file. + + Note:- The variable CRTokenFilePath should remain same as mentioned in the data.sh file. +5. **Build the Image from the source code**: + Refer [Build Steps](#build-steps) +6. **Setup the required IBM Cloud resources**: + + To automatically set up the required IBM Cloud resources, including COS buckets and secrets, simply run the provided `config.sh` script: + + ```bash + ./config.sh + ``` + This script will do the following: + - Create the **primary** and **secondary** buckets in IBM Cloud Object Storage (if not already created). + - Generate and inject secrets for base configuration and authentication into IBM Cloud UI. + - Set up a **Code Engine** Job with the necessary secrets and configurations, including automatic environment variable injection. + - Once you run the config.sh, the job is created and then you need to manually run it on IBM Cloud Code-Engine using UI or CLI. Refer Step 7. + + Note: + - The script will either take the existing project with the project name provided in data.sh or create a new project with the same name. + - The script will create a trusted profile if it does not exists. + - The script will work only with authentication mechanism of Trusted Profile. In-case the user wants to use service-credentials then he/she needs to manually update the AUTH_SECRET and create the API_KEY for both COS Instance as well as add second command-line argument as "false". The first argument can be set to "true" or "false". See below for more. 
+ - The two command line arguments are: + 1. The first argument "isInCodeEngine", a boolean value, is set to true by-default (meaning the job is running in IBM Cloud Code-Engine). It can be set to "false" when the user wants to run it locally. + 2. The second argument "isUsingTrustedProfile", a boolean value, is set to true by-default (meaning authentication mechanism for COS bucket is TrustedProfile). It can be set to "false" when the user wants to use service-credentials. Please make sure that the AUTH_SECRET is also updated to have the following (in-case using service-credentials): + ```bash + IBM_COS_API_KEY_PRIMARY= + IBM_COS_API_KEY_SECONDARY= + ``` + - IMP: TrustedProfile Authentication method won't work when running the program locally. +7. **Run the Program**: + Once everything is configured, run it + + - Locally using + ```bash + go run . + ``` + OR + - On IBM Cloud Code-Engine using CLI: + ```bash + ibmcloud ce jobrun submit --job ${JOB_NAME} --name ${JOB_NAME} + ``` + + + This will: + - Trigger the job in **IBM Code Engine** to process the files in the primary bucket. + - Upload the processed files to the secondary bucket. + + Note: + - If you are running the program locally, pass 2 command-line arguments "false" "false" while running main.go + - Also make sure that env file is configured accordingly. +8. **Check the Logs**: + After the job completes, check the IBM Cloud UI or the logs to confirm the processing status. + +## Run as Parallel Jobs +The system supports parallel execution using IBM Code Engine Job arrays. This enables multiple job instances to run in parallel, each processing a distinct subset of files. It improves performance and scalability, especially for large data sets or high file counts. + +A sample version of the parallel job setup is included in the repository: +https://github.ibm.com/Hamza/Sample-Running-Jobs-in-Parallel + +## Environment Setup + +To run the project locally, you need to create a `.env` file in the root directory. You can refer to the [`env_sample`](/env_sample) file for the required environment variables and their format. + +## Testing + +The project includes unit tests using **Ginkgo** and **Testify**. You can run the tests locally to verify the functionality of the core processing logic. + +To run the tests: +```bash +go test -v +``` + +## Build Steps + +You can build and push the container image using one of the following methods. + +**Note**: If you are using the [build.sh](/build.sh), by-default the [Method-2](#2-build-using-source-code-local-source) is used. + +#### 1. Build Using Podman (Local Source) + +If you want to build the container image using a local `Dockerfile` with Podman, follow these steps: + +```bash +ibmcloud cr login +podman build -t ${REGISTRY}/${NAMESPACE}/${IMAGE_NAME} --platform linux/amd64 . +podman push ${REGISTRY}/${NAMESPACE}/${IMAGE_NAME} +``` + +#### 2. Build Using Source Code (Local Source) + +To build the image from local source code using IBM Cloud Code Engine: + +```bash +ibmcloud ce build create --name ${BUILD_NAME} --build-type local --image ${REGISTRY}/${NAMESPACE}/${IMAGE_NAME} +ibmcloud ce buildrun submit --build ${BUILD_NAME} --name ${BUILD_NAME}-build-run +``` + +#### 3. Build Using Git-based Source + +To build the image using a Git repository: + +1. Create a deploy key or user-access key in your GitHub repository. +2. Add the private key by creating an SSH secret in IBM Cloud Code Engine. +3. 
Create a build using the Git repository: + +```bash +ibmcloud ce build create \ + --name ${BUILD_NAME} \ + --image ${REGISTRY}/${NAMESPACE}/${IMAGE_NAME} \ + --source ${GIT_SSH_URL} \ + --context-dir / \ + --strategy dockerfile \ + --git-repo-secret ${GIT_SSH_SECRET} +``` + +4. Submit the build: + +```bash +ibmcloud ce buildrun submit --build ${BUILD_NAME} +``` + +### View Build Logs + +To view the logs of a build run, use the following command: + +```bash +ibmcloud ce buildrun logs -f -n +``` + +Replace `` with the actual name of your build run. + +## Performance + +The program is optimized for handling large files (up to several GBs). For example, when tested with 50 files (each 65MB), the program processed the files in 70 to 100 seconds, with 13 parallel jobs. + +## Troubleshooting + +- **Error: Object Not Found**: Ensure that the primary bucket is correctly configured and contains the objects you expect. +- **Authentication Failure**: Check that the authentication method (trusted profile or service credentials) is correctly set up. +- **Job Timeout or Failure**: If the job takes longer than expected, check the logs for any performance bottlenecks or errors. diff --git a/code-engine-cos2cos/bucketOperations/bucketOperations_test.go b/code-engine-cos2cos/bucketOperations/bucketOperations_test.go new file mode 100644 index 00000000..4568ba2e --- /dev/null +++ b/code-engine-cos2cos/bucketOperations/bucketOperations_test.go @@ -0,0 +1,226 @@ +package bucketOperations + +import ( + "errors" + "testing" + + "github.com/IBM/ibm-cos-sdk-go/aws" + "github.com/IBM/ibm-cos-sdk-go/service/s3" + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" + "github.com/stretchr/testify/mock" + . "ibm.com/codeengine/cos2cos/mock" +) + +func TestBucketOps(t *testing.T) { + gomega.RegisterFailHandler(ginkgo.Fail) + ginkgo.RunSpecs(t, "BucketOperations Suite") +} + +var _ = ginkgo.Describe("ListAvailableBuckets", func() { + var ( + mockS3Client *MockS3Client + bucket *Bucket + ) + + // Before each test, initialize the mock and the Bucket struct + ginkgo.BeforeEach(func() { + mockS3Client = new(MockS3Client) + bucket = &Bucket{ + Name: "test-bucket", + Client: mockS3Client, + } + }) + + // Test case struct format + type TestCase struct { + name string + mockResponse *s3.ListBucketsOutput + mockError error + expectedOutput *s3.ListBucketsOutput + expectedError error + } + + // Define test cases + var testCases = []TestCase{ + { + name: "Successful ListBuckets", + mockResponse: &s3.ListBucketsOutput{ + Buckets: []*s3.Bucket{ + { + Name: aws.String("bucket1"), + }, + { + Name: aws.String("bucket2"), + }, + }, + }, + mockError: nil, + expectedOutput: &s3.ListBucketsOutput{ + Buckets: []*s3.Bucket{ + { + Name: aws.String("bucket1"), + }, + { + Name: aws.String("bucket2"), + }, + }, + }, + expectedError: nil, + }, + { + name: "Error Listing Buckets", + mockResponse: nil, + mockError: errors.New("failed to list buckets"), + expectedOutput: nil, + expectedError: errors.New("failed to list buckets"), + }, + } + + // Iterate over test cases + for _, testCase := range testCases { + ginkgo.It(testCase.name, func() { + // Arrange mock behavior + mockS3Client.On("ListBuckets", mock.Anything).Return(testCase.mockResponse, testCase.mockError) + + // Act + result, err := bucket.ListAvailableBuckets() + + // Assert the results + if testCase.expectedError != nil { + gomega.Expect(err).To(gomega.HaveOccurred()) + gomega.Expect(err.Error()).To(gomega.Equal(testCase.expectedError.Error())) + } else { + 
gomega.Expect(err).ToNot(gomega.HaveOccurred()) + gomega.Expect(result).To(gomega.Equal(testCase.expectedOutput)) + } + + // Assert that the mock was called as expected + mockS3Client.AssertExpectations(ginkgo.GinkgoT()) + }) + } +}) + +var _ = ginkgo.Describe("ListBucketObjects", func() { + + var ( + mockClient *MockS3Client + bucket *Bucket + ) + + // Setup before each test + ginkgo.BeforeEach(func() { + mockClient = new(MockS3Client) + bucket = &Bucket{ + Name: "test-bucket", + Client: mockClient, // Inject mock client for testing + } + }) + + // Define test cases + testCases := []struct { + description string + mockResponse *s3.ListObjectsV2Output + mockError error + expectedError bool + expectedCount int + }{ + { + description: "Successful response with objects", + mockResponse: &s3.ListObjectsV2Output{ + Contents: []*s3.Object{ + {Key: aws.String("file1.txt")}, + {Key: aws.String("file2.txt")}, + }, + }, + mockError: nil, + expectedError: false, + expectedCount: 2, + }, + { + description: "Error response from IBM COS", + mockResponse: nil, + mockError: errors.New("some IBM COS error"), + expectedError: true, + expectedCount: 0, + }, + } + + // Iterate through the test cases + for _, tc := range testCases { + tc := tc // capture the range variable + ginkgo.Context(tc.description, func() { + ginkgo.It("should handle the response correctly", func() { + // Setup mock expectation + mockClient.On("ListObjectsV2", mock.AnythingOfType("*s3.ListObjectsV2Input")).Return(tc.mockResponse, tc.mockError) + + // Call the method + result, err := bucket.ListBucketObjects() + + // Assertions + if tc.expectedError { + gomega.Expect(err).To(gomega.HaveOccurred()) + gomega.Expect(result).To(gomega.BeNil()) + } else { + gomega.Expect(err).ToNot(gomega.HaveOccurred()) + gomega.Expect(result).ToNot(gomega.BeNil()) + gomega.Expect(len(result.Contents)).To(gomega.Equal(tc.expectedCount)) + } + + // Assert mock expectations were met + mockClient.AssertExpectations(ginkgo.GinkgoT()) + }) + }) + } +}) + +var _ = ginkgo.Describe("UploadBytesToBucket", func() { + var mockClient *MockS3Client + var bucket *Bucket + + ginkgo.BeforeEach(func() { + mockClient = new(MockS3Client) + bucket = &Bucket{ + Name: "Test Bucket", + Client: mockClient, + } + }) + type UploadBytesToBucketTestCase struct { + ObjectKey string + Data []byte + MockResponse *s3.PutObjectOutput + MockError error + ExpectedError error + } + testCases := []UploadBytesToBucketTestCase{ + { + ObjectKey: "test-object-1", + Data: []byte("some data to upload"), + MockResponse: &s3.PutObjectOutput{}, // Successful response + MockError: nil, + ExpectedError: nil, // No error expected + }, + { + ObjectKey: "test-object-2", + Data: []byte("some data to upload"), + MockResponse: nil, // No response + MockError: errors.New("failed to upload data"), + ExpectedError: errors.New("failed to upload data"), // Error expected + }, + } + + for _, tc := range testCases { + ginkgo.It("Should handle uploads correctly", func() { + mockClient.On("PutObject", mock.Anything).Return(tc.MockResponse, tc.MockError) + + err := bucket.UploadBytesToBucket(tc.ObjectKey, tc.Data) + + if tc.ExpectedError == nil { + gomega.Expect(err).NotTo(gomega.HaveOccurred()) + } else { + gomega.Expect(err).To(gomega.HaveOccurred()) + } + mockClient.AssertExpectations(ginkgo.GinkgoT()) + }) + } +}) diff --git a/code-engine-cos2cos/bucketOperations/bucketOps.go b/code-engine-cos2cos/bucketOperations/bucketOps.go new file mode 100644 index 00000000..bed7a0cd --- /dev/null +++ 
b/code-engine-cos2cos/bucketOperations/bucketOps.go @@ -0,0 +1,282 @@ +package bucketOperations + +import ( + "bytes" + "fmt" + + "github.com/IBM/ibm-cos-sdk-go/aws" + "github.com/IBM/ibm-cos-sdk-go/aws/credentials/ibmiam" + "github.com/IBM/ibm-cos-sdk-go/aws/session" + "github.com/IBM/ibm-cos-sdk-go/service/s3" +) + +type Bucket struct { + Name string + Client S3ClientInterface +} + +// Interface that includes all sdk methods on which bucket depends. +type S3ClientInterface interface { + HeadObject(input *s3.HeadObjectInput) (*s3.HeadObjectOutput, error) + ListBuckets(input *s3.ListBucketsInput) (*s3.ListBucketsOutput, error) + ListObjectsV2(input *s3.ListObjectsV2Input) (*s3.ListObjectsV2Output, error) + ListObjectsV2Pages(input *s3.ListObjectsV2Input, fn func(*s3.ListObjectsV2Output, bool) bool) error + GetObject(input *s3.GetObjectInput) (*s3.GetObjectOutput, error) + PutObject(input *s3.PutObjectInput) (*s3.PutObjectOutput, error) + GetObjectTagging(input *s3.GetObjectTaggingInput) (*s3.GetObjectTaggingOutput, error) + PutObjectTagging(input *s3.PutObjectTaggingInput) (*s3.PutObjectTaggingOutput, error) + DeleteObjectTagging(input *s3.DeleteObjectTaggingInput) (*s3.DeleteObjectTaggingOutput, error) + DeleteObject(input *s3.DeleteObjectInput) (*s3.DeleteObjectOutput, error) +} + +type CosSession struct { + apiKey string + resourceInstanceID string + serviceEndpoint string + region string + authEndpoint string + trustedProfileID string + crtokenFilePath string +} + +func NewCosClient(apiKey, resourceInstanceID, serviceEndpoint, region string, authEndpoint ...string) *s3.S3 { + // fmt.Println("Creating cos Client") + var authEndpointVal string + if len(authEndpoint) == 0 { + authEndpointVal = "https://iam.cloud.ibm.com/identity/token" + } else { + authEndpointVal = authEndpoint[0] + } + + cosSession := CosSession{ + apiKey: apiKey, + resourceInstanceID: resourceInstanceID, + serviceEndpoint: serviceEndpoint, + region: region, + authEndpoint: authEndpointVal, + } + cosClient := cosSession.CreateIBMCOSSession() + + return cosClient +} + +func NewCosClientTrustedProfile(trustedProfileID, crTokenFilePath, resourceInstanceID, serviceEndpoint, region string, authEndpoint ...string) *s3.S3 { + var authEndpointVal string + if len(authEndpoint) == 0 { + authEndpointVal = "https://iam.cloud.ibm.com/identity/token" + } else { + authEndpointVal = authEndpoint[0] + } + + cosSession := CosSession{ + resourceInstanceID: resourceInstanceID, + serviceEndpoint: serviceEndpoint, + region: region, + authEndpoint: authEndpointVal, + trustedProfileID: trustedProfileID, + crtokenFilePath: crTokenFilePath, + } + cosClient := cosSession.CreateIBMCOSSessionTrustedProfile() + + return cosClient +} + +func NewBucket(name string, cosClient *s3.S3) *Bucket { + + configuredBucket := Bucket{ + Name: name, + Client: cosClient, + } + + return &configuredBucket +} + +// Helper function to create session client with provided config from bucket. +// Creating config using Service Credentials +func (c *CosSession) CreateIBMCOSSession() *s3.S3 { + conf := aws.NewConfig(). + WithEndpoint(*aws.String(c.serviceEndpoint)). + WithCredentials(ibmiam.NewStaticCredentials(aws.NewConfig(), + c.authEndpoint, c.apiKey, c.resourceInstanceID)). + WithRegion(c.region). + WithS3ForcePathStyle(true) + + sess := session.Must(session.NewSession()) + client := s3.New(sess, conf) + + return client +} + +func (c *CosSession) CreateIBMCOSSessionTrustedProfile() *s3.S3 { + // Creating config using Trusted Profile + conf := aws.NewConfig(). 
+ WithEndpoint(*aws.String(c.serviceEndpoint)). + WithCredentials(ibmiam.NewTrustedProfileCredentialsCR(aws.NewConfig(), c.authEndpoint, c.trustedProfileID, c.crtokenFilePath, c.resourceInstanceID)). + WithRegion(c.region). + WithS3ForcePathStyle(true) + + sess := session.Must(session.NewSession()) + client := s3.New(sess, conf) + + return client +} + +// Function to list all the available buckets for a config. +func (b *Bucket) ListAvailableBuckets() (*s3.ListBucketsOutput, error) { + + // Create client + // client := CreateIBMCOSSession() + client := b.Client + // Call Function + d, err := client.ListBuckets(&s3.ListBucketsInput{}) + + if err != nil { + fmt.Println("Could not list the buckets") + return nil, err + } + return d, nil +} + +func (b *Bucket) ListBucketObjects() (*s3.ListObjectsV2Output, error) { + bucketName := b.Name + // fmt.Println("Listing Bucket Objects in Bucket:", bucketName) + // Create client + // client := CreateIBMCOSSession() + client := b.Client + + // Call Function + Input := &s3.ListObjectsV2Input{ + Bucket: aws.String(bucketName), + } + // fmt.Println("Input object created", Input) + objectList, e := client.ListObjectsV2(Input) + + if e != nil { + fmt.Println("Error listing objects", e) + return nil, e + } + + return objectList, nil +} + +func (b *Bucket) ListBucketObjectsPagination() (*s3.ListObjectsV2Output, error) { + bucketName := b.Name + // fmt.Println("Listing Bucket Objects in Bucket:", bucketName) + // Create client + // client := CreateIBMCOSSession() + client := b.Client + + // Call Function + input := &s3.ListObjectsV2Input{ + Bucket: aws.String(bucketName), + } + + var allContents []*s3.Object + + err := client.ListObjectsV2Pages(input, func(page *s3.ListObjectsV2Output, lastPage bool) bool { + for _, obj := range page.Contents { + allContents = append(allContents, &s3.Object{ + Key: obj.Key, + LastModified: obj.LastModified, + }) + fmt.Println(*obj.Key) + } + return true + }) + if err != nil { + fmt.Println("Error listing objects", err) + return nil, err + } + + combinedOutput := &s3.ListObjectsV2Output{ + Contents: allContents, + } + return combinedOutput, nil + +} + +func (bucket *Bucket) UploadBytesToBucket(objectKey string, data []byte) error { + client := bucket.Client + + input := &s3.PutObjectInput{ + Bucket: aws.String(bucket.Name), + Key: aws.String(objectKey), + Body: bytes.NewReader(data), + } + + _, err := client.PutObject(input) + if err != nil { + return fmt.Errorf("failed to upload data: %w", err) + } + + return nil +} + +// Function to upload a object to a bucket. - Not Tested. +/* +func (b *Bucket) UploadObjectToBucket(object *s3.GetObjectOutput) error { + client := b.Client + + var buf bytes.Buffer + _, err := buf.ReadFrom(object.Body) + if err != nil { + log.Fatalf("Unable to read object body: %v", err) + return fmt.Errorf("Unable to read object body: %v", err) + } + + _, err = client.PutObject(&s3.PutObjectInput{ + Bucket: aws.String(destinationBucket), + Key: aws.String(destinationKey), + Body: bytes.NewReader(buf.Bytes()), + }) + if err != nil { + log.Fatalf("Unable to upload object to destination bucket: %v", err) + return fmt.Errorf("Unable to upload object to destination bucket: %v", err) + } + return nil +} +*/ + +// Returns an error if the upload fails. 
- Unused +/* +func (b *Bucket) UploadFileToBucket(objectKey string, filePath string) error { + + // Create client + // client := CreateIBMCOSSession() + client := b.Client + bucketName := b.Name + + // Read object into byte + + file, err := os.Open(filePath) + + if err != nil { + log.Fatalf("Unable to open file: %v", err) + return err + } + + fileInfo, err := file.Stat() + if err != nil { + log.Fatalf("Unable to get file stats: %v", err) + return err + } + fileBytes := make([]byte, fileInfo.Size()) + _, err = file.Read(fileBytes) + + if err != nil { + log.Fatalf("Unable to read file: %v", err) + return err + } + + input := s3.PutObjectInput{ + Bucket: aws.String(bucketName), + Key: aws.String(objectKey), + Body: bytes.NewReader(fileBytes), + } + + // Call Function to upload (Put) an object + _, _ = client.PutObject(&input) + // fmt.Println(result) + return nil +} +*/ diff --git a/code-engine-cos2cos/bucketOperations/objectOps.go b/code-engine-cos2cos/bucketOperations/objectOps.go new file mode 100644 index 00000000..2c0ac64f --- /dev/null +++ b/code-engine-cos2cos/bucketOperations/objectOps.go @@ -0,0 +1,175 @@ +package bucketOperations + +import ( + "fmt" + "time" + + "github.com/IBM/ibm-cos-sdk-go/aws" + "github.com/IBM/ibm-cos-sdk-go/service/s3" +) + +// Function to compare the time between object and timestamp given. +// Function returns 1 if lastBackupTime is more than object timestamp, +// 0 if same, +// -1 if less +// A positive value indicates that process is required. +func CompareUpdateTime(res *s3.Object, lastProcessTime time.Time) int { + // If the object is null, return -1 instead of error stating no processing required + if res == nil { + return -1 + } + if res.LastModified.Before(lastProcessTime) { + return -1 + } else if res.LastModified.Equal(lastProcessTime) { + return 0 + } else { + return 1 + } +} + +func (b *Bucket) CheckIfTagExists(objectKey string) (bool, error) { + // Get object tags from COS + resp, err := b.Client.GetObjectTagging(&s3.GetObjectTaggingInput{ + Bucket: aws.String(b.Name), + Key: aws.String(objectKey), + }) + + if err != nil { + return false, fmt.Errorf("Error getting object tags: %v", err) + + } + tagKeyToCheck := "isInProcessing" + + // Iterate through the tags and check if the key exists + for _, tag := range resp.TagSet { + if *tag.Key == tagKeyToCheck { + // fmt.Println(tag) + return true, nil + } + } + return false, nil +} + +func IsProcessingRequired(res *s3.Object, timestamp time.Time) bool { + compareVal := CompareUpdateTime(res, timestamp) + // fmt.Printf("The time for last process is: %s\nObject last time:%s\n", timestamp.String(), res.LastModified.String()) + // A negative value indicates that process is required. 
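+	// Clarification: processing is required only when the comparison is positive,
+	// i.e. the object was modified strictly after the last recorded process time.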
+ return compareVal > 0 +} + +func (b *Bucket) GetObject(objectKey string) (*s3.GetObjectOutput, error) { + client := b.Client + bucketName := b.Name + + Input := &s3.GetObjectInput{ + Bucket: aws.String(bucketName), + Key: aws.String(objectKey), + } + + response, err := client.GetObject(Input) + if err != nil { + return nil, fmt.Errorf("error getting the object %w", err) + } + + return response, nil +} + +/* +Function to store the object in a file in local storage +func StoreObject(objectKey string, response *s3.GetObjectOutput) error { + + tempFile, err := os.Create(fmt.Sprintf("temp/%s", objectKey)) + + if err != nil { + log.Fatal(err) + } + + // Read from response body + _, err = tempFile.ReadFrom(response.Body) + + if err != nil { + return err + } + return nil +} +*/ + +/* +// Function to get the metadata of an object without getting the object itself - Unused +func (b *Bucket) GetObjectMetadata(objectKey string) (*s3.HeadObjectOutput, error) { + client := b.Client + bucketName := b.Name + + input := &s3.HeadObjectInput{ + Bucket: aws.String(bucketName), + Key: aws.String(objectKey), + } + + result, err := client.HeadObject(input) + if err != nil { + // log.Fatalf("Failed to retrieve metadata: %v", err) + return nil, err + } + + // fmt.Println("Metadata for object:", objectKey) + // fmt.Printf("Last Modified: %v\n", result.LastModified) + // fmt.Printf("Size: %d bytes\n", *result.ContentLength) + // fmt.Printf("Content-Type: %s\n", *result.ContentType) + // fmt.Printf("ETag: %s\n", *result.ETag) + + // Print custom metadata + if result.Metadata != nil { + fmt.Println("Custom Metadata:") + for key, value := range result.Metadata { + fmt.Printf("%s: %s\n", key, *value) + } + } + + return result, nil +} +*/ +// test + +// Add a tag to the object (for example, a custom tag) +// The tag is added as a key-value pair +func (b *Bucket) AddTag(objectKey, tagKey, tagValue string) error { + + cosClient := b.Client + tagging := &s3.PutObjectTaggingInput{ + Bucket: aws.String(b.Name), + Key: aws.String(objectKey), + Tagging: &s3.Tagging{ + TagSet: []*s3.Tag{ + { + Key: aws.String(tagKey), + Value: aws.String(tagValue), + }, + }, + }, + } + // Perform the tagging operation + _, err := cosClient.PutObjectTagging(tagging) + if err != nil { + return fmt.Errorf("failed to add tag to object: %v", err) + } + + return nil +} + +// Delete the specified key from bucket object tags +func (b *Bucket) DeleteTag(objectKey, tagKey string) error { + + cosClient := b.Client + + // Perform the untagging operation + _, err := cosClient.DeleteObjectTagging(&s3.DeleteObjectTaggingInput{ + Bucket: aws.String(b.Name), + Key: aws.String(objectKey), + }) + + if err != nil { + return fmt.Errorf("failed to delete tag to object: %v", err) + } + // fmt.Printf("Tag Deleted successfully from the object: %s\n", objectKey) + return nil +} diff --git a/code-engine-cos2cos/bucketOperations/objectOps_test.go b/code-engine-cos2cos/bucketOperations/objectOps_test.go new file mode 100644 index 00000000..673c8486 --- /dev/null +++ b/code-engine-cos2cos/bucketOperations/objectOps_test.go @@ -0,0 +1,325 @@ +package bucketOperations + +import ( + "errors" + "strings" + "time" + + "github.com/IBM/ibm-cos-sdk-go/aws" + "github.com/IBM/ibm-cos-sdk-go/service/s3" + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" + "github.com/stretchr/testify/mock" + mockS3 "ibm.com/codeengine/cos2cos/mock" +) + +// func TestObjectOps(t *testing.T) { +// gomega.RegisterFailHandler(ginkgo.Fail) +// ginkgo.RunSpecs(t, "ObjectOperations Suite") +// } + 
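+// Note: the Ginkgo entry point for this package lives in bucketOperations_test.go
+// (TestBucketOps calls RunSpecs once for the whole package), so the specs below are
+// picked up by that run and no second bootstrap is needed here.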
+var _ = ginkgo.Describe("CompareUpdateTime", func() { + type testCase struct { + name string + lastProcessTime time.Time + obj *s3.Object + expected int + } + + testCases := []testCase{ + { + name: "Zero Valued Time - When No previous processing", + lastProcessTime: time.Time{}, + obj: &s3.Object{ + Key: aws.String("TestObject"), + LastModified: func() *time.Time { t := time.Date(2025, 3, 12, 10, 0, 0, 0, time.UTC); return &t }(), + }, + expected: 1, + }, + { + name: "Object Time greater than lastProcessedTime", + lastProcessTime: time.Date(2025, 3, 12, 10, 0, 0, 0, time.UTC), + obj: &s3.Object{ + LastModified: func() *time.Time { t := time.Date(2025, 3, 12, 10, 0, 1, 1, time.UTC); return &t }(), + }, + expected: 1, + }, + { + name: "Object Time before than lastProcessedTime", + lastProcessTime: time.Date(2025, 3, 12, 10, 0, 0, 0, time.UTC), + obj: &s3.Object{ + LastModified: func() *time.Time { t := time.Date(2025, 3, 12, 9, 0, 1, 1, time.UTC); return &t }(), + }, + expected: -1, + }, + { + name: "Object Time Equal than lastProcessedTime", + lastProcessTime: time.Date(2025, 3, 12, 10, 0, 1, 0, time.UTC), + obj: &s3.Object{ + LastModified: func() *time.Time { t := time.Date(2025, 3, 12, 10, 0, 1, 0, time.UTC); return &t }(), + }, + expected: 0, + }, + { + name: "Null Object", + lastProcessTime: time.Date(2025, 3, 12, 10, 0, 1, 0, time.UTC), + obj: nil, + expected: -1, + }, + } + + for _, testcase := range testCases { + ginkgo.It(testcase.name, func() { + result := CompareUpdateTime(testcase.obj, testcase.lastProcessTime) + // fmt.Println(result) + gomega.Expect(result).To(gomega.Equal(testcase.expected)) + }) + } +}) + +var _ = ginkgo.Describe("IsProcessingRequired", func() { + type testCase struct { + name string + lastProcessTime time.Time + obj *s3.Object + expected bool + } + + testCases := []testCase{ + { + name: "Zero Valued Time - When No previous processing", + lastProcessTime: time.Time{}, + obj: &s3.Object{ + Key: aws.String("TestObject"), + LastModified: func() *time.Time { t := time.Date(2025, 3, 12, 10, 0, 0, 0, time.UTC); return &t }(), + }, + expected: true, + }, + { + name: "Object Time greater than lastProcessedTime", + lastProcessTime: time.Date(2025, 3, 12, 10, 0, 0, 0, time.UTC), + obj: &s3.Object{ + LastModified: func() *time.Time { t := time.Date(2025, 3, 12, 10, 0, 1, 1, time.UTC); return &t }(), + }, + expected: true, + }, + { + name: "Object Time before than lastProcessedTime", + lastProcessTime: time.Date(2025, 3, 12, 10, 0, 0, 0, time.UTC), + obj: &s3.Object{ + LastModified: func() *time.Time { t := time.Date(2025, 3, 12, 9, 0, 1, 1, time.UTC); return &t }(), + }, + expected: false, + }, + { + name: "Object Time Equal than lastProcessedTime", + lastProcessTime: time.Date(2025, 3, 12, 10, 0, 1, 0, time.UTC), + obj: &s3.Object{ + LastModified: func() *time.Time { t := time.Date(2025, 3, 12, 10, 0, 1, 0, time.UTC); return &t }(), + }, + expected: false, + }, + { + name: "Null Object", + lastProcessTime: time.Date(2025, 3, 12, 10, 0, 1, 0, time.UTC), + obj: nil, + expected: false, + }, + } + + for _, testcase := range testCases { + ginkgo.It(testcase.name, func() { + result := IsProcessingRequired(testcase.obj, testcase.lastProcessTime) + gomega.Expect(result).To(gomega.Equal(testcase.expected)) + }) + } +}) + +var _ = ginkgo.Describe("CheckIfTagExists", func() { + var ( + mockS3Client *mockS3.MockS3Client + bucket *Bucket + ) + + // Initialize before each test + ginkgo.BeforeEach(func() { + mockS3Client = new(mockS3.MockS3Client) + bucket = &Bucket{ + Client: 
mockS3Client, + Name: "test-bucket", + } + }) + // Struct to represent a test case for CheckIfTagExists + type CheckIfTagExistsTestCase struct { + ObjectKey string + TagKey string + MockResponse *s3.GetObjectTaggingOutput + MockError error + ExpectedResult bool + } + // Test cases in a struct array + testCases := []CheckIfTagExistsTestCase{ + { + ObjectKey: "test-object-1", + MockResponse: &s3.GetObjectTaggingOutput{ + TagSet: []*s3.Tag{ + {Key: aws.String("isInProcessing"), Value: aws.String("value1")}, + {Key: aws.String("Tag2"), Value: aws.String("value2")}, + }, + }, + MockError: nil, + ExpectedResult: true, + }, + { + ObjectKey: "test-object-2", + MockResponse: &s3.GetObjectTaggingOutput{ + TagSet: []*s3.Tag{ + {Key: aws.String("Tag1"), Value: aws.String("value1")}, + {Key: aws.String("Tag2"), Value: aws.String("value2")}, + }, + }, + MockError: nil, + ExpectedResult: false, + }, + { + ObjectKey: "test-object-3", + MockResponse: nil, + MockError: errors.New("failed to get tags"), + ExpectedResult: false, + }, + } + + // Iterate over the test cases + for _, tc := range testCases { + ginkgo.It("should correctly check if the tag exists", func() { + // Mock the GetObjectTagging behavior + mockS3Client.On("GetObjectTagging", mock.Anything).Return(tc.MockResponse, tc.MockError) + + // Call the method + result, _ := bucket.CheckIfTagExists(tc.ObjectKey) + + // Assertions + gomega.Expect(result).To(gomega.Equal(tc.ExpectedResult)) + + // Assert that the GetObjectTagging method was called with the correct input + mockS3Client.AssertExpectations(ginkgo.GinkgoT()) + }) + } +}) + +var _ = ginkgo.Describe("GetObject", func() { + var ( + mockS3Client *mockS3.MockS3Client + bucket *Bucket + ) + + ginkgo.BeforeEach(func() { + mockS3Client = new(mockS3.MockS3Client) + bucket = &Bucket{ + Name: "test-bucket", + Client: mockS3Client, + } + }) + + var testCases = []struct { + ObjectKey string + MockResponse *s3.GetObjectOutput + MockError error + ExpectedError bool + }{ + { + ObjectKey: "test-object-key", + MockResponse: &s3.GetObjectOutput{ + Body: aws.ReadSeekCloser(strings.NewReader("object content")), + }, + MockError: nil, + ExpectedError: false, + }, + { + ObjectKey: "non-existent-object", + MockResponse: nil, + MockError: errors.New("no such key"), + ExpectedError: true, + }, + } + + for _, tc := range testCases { + tc := tc + ginkgo.It("should return correct result for object key: "+tc.ObjectKey, func() { + // Setup mock response + mockS3Client.On("GetObject", mock.Anything).Return(tc.MockResponse, tc.MockError) + + // Call the method + object, err := bucket.GetObject(tc.ObjectKey) + + // Verify the result + if tc.ExpectedError { + gomega.Expect(err).To(gomega.MatchError(tc.MockError)) + gomega.Expect(object).To(gomega.BeNil()) + } else { + gomega.Expect(err).To(gomega.BeNil()) + gomega.Expect(object).NotTo(gomega.BeNil()) + gomega.Expect(object.Body).NotTo(gomega.BeNil()) + } + + // Assert mock expectations + mockS3Client.AssertExpectations(ginkgo.GinkgoT()) + }) + } +}) + +var _ = ginkgo.Describe("AddTag", func() { + var ( + mockS3Client *mockS3.MockS3Client + bucket *Bucket + ) + + ginkgo.BeforeEach(func() { + mockS3Client = new(mockS3.MockS3Client) + bucket = &Bucket{ + Name: "test-bucket", + Client: mockS3Client, + } + }) + + ginkgo.It("should have no errors", func() { + mockS3Client.On("PutObjectTagging", mock.Anything).Return(&s3.PutObjectTaggingOutput{}, nil) + err := bucket.AddTag("ObjecttKey1", "tagKey1", "value") + + gomega.Expect(err).To(gomega.BeNil()) + }) + ginkgo.It("should have 
handle errors", func() { + mockS3Client.On("PutObjectTagging", mock.Anything).Return(&s3.PutObjectTaggingOutput{}, errors.New("error adding tag")) + err := bucket.AddTag("ObjecttKey1", "tagKey1", "value") + + gomega.Expect(err).ToNot(gomega.BeNil()) + }) +}) + +var _ = ginkgo.Describe("DeleteTag", func() { + var ( + mockS3Client *mockS3.MockS3Client + bucket *Bucket + ) + + ginkgo.BeforeEach(func() { + mockS3Client = new(mockS3.MockS3Client) + bucket = &Bucket{ + Name: "test-bucket", + Client: mockS3Client, + } + }) + + ginkgo.It("should delete tag and have no errors", func() { + mockS3Client.On("DeleteObjectTagging", mock.Anything).Return(&s3.DeleteObjectTaggingOutput{}, nil) + err := bucket.DeleteTag("ObjecttKey1", "tagKey1") + + gomega.Expect(err).To(gomega.BeNil()) + }) + ginkgo.It("should have handle errors", func() { + mockS3Client.On("DeleteObjectTagging", mock.Anything).Return(&s3.DeleteObjectTaggingOutput{}, errors.New("error delete tag")) + err := bucket.DeleteTag("ObjecttKey1", "tagKey1") + + gomega.Expect(err).ToNot(gomega.BeNil()) + }) +}) diff --git a/code-engine-cos2cos/build.sh b/code-engine-cos2cos/build.sh new file mode 100755 index 00000000..48c53232 --- /dev/null +++ b/code-engine-cos2cos/build.sh @@ -0,0 +1,10 @@ +#!/bin/sh + +BUILD_NAME=example-script-build-cos2cos-${IMAGE_NAME} + +ibmcloud target -r ${CR_REGION} -g ${RESOURCE_GROUP} +ibmcloud ce project list +ibmcloud ce project select -n "${PROJECT_NAME}" + +ibmcloud ce build create --name ${BUILD_NAME} --build-type local --image ${REGISTRY}/${REGISTRY_NAMESPACE}/${IMAGE_NAME} +ibmcloud ce buildrun submit --build ${BUILD_NAME} --name ${BUILD_NAME}-build-run \ No newline at end of file diff --git a/code-engine-cos2cos/config.sh b/code-engine-cos2cos/config.sh new file mode 100755 index 00000000..17d16ed6 --- /dev/null +++ b/code-engine-cos2cos/config.sh @@ -0,0 +1,287 @@ +#!/bin/bash + +source data.sh + +GREEN='\033[0;32m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +# function accept bucket_name, region, instance_name, resource_group, type of bucket[primary,secondary], iteration +function create_instance_bucket(){ + COS_BUCKET_NAME=$1 + COS_REGION=$2 + COS_INSTANCE_NAME=$3 + COS_RESOURCE_GROUP=$4 + TYPE=$5 + I=$6 + + echo "---" + echo "Step $I: Creating COS $TYPE Instances and Bucket" + echo "Step $I.1: $TYPE INSTANCE CREATION" + echo "Checking if $TYPE instance exists" + COS_INSTANCE_CRN=$(ibmcloud resource service-instance "${COS_INSTANCE_NAME}" --crn -q | head -n 1) + + if [ -z "$COS_INSTANCE_CRN" ]; then + echo "Creating COS Instance $TYPE in ${COS_REGION}" + COS_INSTANCE_CRN=$(ibmcloud resource service-instance-create "${COS_INSTANCE_NAME}" \ + cloud-object-storage standard global \ + -d premium-global-deployment-iam \ + -g "${COS_RESOURCE_GROUP}" | grep "ID:"| head -n 1 | awk '{print $2}') + + if [ -z "$COS_INSTANCE_CRN" ]; then + echo -e "${RED}Failure${NC}: Step $I.1: Could not create COS Instance" + exit 1 + fi + else + echo "COS Instance ${COS_INSTANCE_NAME} already exists." + fi + if [ $? -eq 0 ]; then + echo -e "${GREEN}Success${NC}: Step $I.1: $TYPE Instance creation: (${COS_INSTANCE_NAME}, ${COS_INSTANCE_CRN})" + fi + if [ $TYPE = "PRIMARY" ]; then + COS_INSTANCE_CRN_PRIMARY=${COS_INSTANCE_CRN} + else + COS_INSTANCE_CRN_SECONDARY=${COS_INSTANCE_CRN} + fi + + # Creating bucket in the instance - silent failure. + echo "Step $I.2: Creating $TYPE Bucket." 
+ ibmcloud cos bucket-create \ + --bucket ${COS_BUCKET_NAME} \ + --class smart \ + --ibm-service-instance-id ${COS_INSTANCE_CRN} \ + --region ${COS_REGION} 2>/dev/null + + # Check if bucket exists. + ibmcloud cos bucket-head --bucket "$COS_BUCKET_NAME" --region "$COS_REGION" + if [ $? -ne 0 ]; then + echo -e "${RED}Failure${NC}: Step $I.2: $TYPE Bucket does not exists. Exiting..." + exit 1 + else + echo -e "${GREEN}Success${NC}: Step $I.2: $TYPE Bucket Found." + fi + +} + +# Creating PRIMARY Instance and Bucket +create_instance_bucket ${COS_BUCKET_NAME_PRIMARY} ${COS_REGION_PRIMARY} ${COS_INSTANCE_NAME_PRIMARY} ${COS_RESOURCE_GROUP_PRIMARY} "PRIMARY" "1" + +# Creating SECONDARY Instance and Bucket +create_instance_bucket ${COS_BUCKET_NAME_SECONDARY} ${COS_REGION_SECONDARY} ${COS_INSTANCE_NAME_SECONDARY} ${COS_RESOURCE_GROUP_SECONDARY} "SECONDARY" "2" + +# Create a Project if not exists. +echo "---" +echo "Step 3: Creating and selecting the project\n" +ibmcloud target -r ${PROJECT_REGION} -g ${PROJECT_RESOURCE_GROUP} +ibmcloud ce project create --name "${PROJECT_NAME}" -q +ibmcloud ce project select -n "${PROJECT_NAME}" +if [ $? -eq 0 ]; then + echo -e "${GREEN}SUCCESS${NC}: Step 3: Project Selected." +fi + +# Get Project CRN +echo "---" +echo "Step 4: Getting the Project CRN\n" +PROJECT_CRN=$(ibmcloud resource service-instance "${PROJECT_NAME}" --location "${PROJECT_REGION}" -g "${PROJECT_RESOURCE_GROUP}" -q --crn) +if [ $? -eq 0 ]; then + echo -e "${GREEN}SUCCESS${NC}: Step 4: Project CRN Fetched: $PROJECT_CRN" +fi +# Creating Trusted Profile +echo "---" +echo "Step 5: Creating/Fetching Trusted Profile $TRUSTED_PROFILE_NAME" +COS_TRUSTED_PROFILE_ID_PRIMARY=$(ibmcloud iam trusted-profile ${TRUSTED_PROFILE_NAME} --id) +if [ -z "$COS_TRUSTED_PROFILE_ID_PRIMARY" ] ; then + COS_TRUSTED_PROFILE_ID_PRIMARY=$(ibmcloud iam trusted-profile-create "${TRUSTED_PROFILE_NAME}" -o JSON | jq -r '.id') + if [ $? -ne 0 ]; then + echo -e "${RED}Failure${NC}: Step 5: Could not create trusted-profile.Exiting\n" + exit 1 + fi +fi +COS_TRUSTED_PROFILE_ID_SECONDARY=$(echo ${COS_TRUSTED_PROFILE_ID_PRIMARY}) +if [ $? -eq 0 ]; then + echo -e "${GREEN}SUCCESS${NC}: Step 5: Trusted Profile Created/Fetched.\n" +fi + +echo "-----" +echo "Step 6: Creating Secrets (Base Secret, Auth Secret, Registry Secret)" +echo "---" +echo "Step 6.1: Creating Base Secret: $BASE_SECRET" +ibmcloud ce secret create --name ${BASE_SECRET} \ + --from-literal SECONDARY_COS_BUCKET_NAME=${COS_BUCKET_NAME_SECONDARY} \ + --from-literal IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=${COS_INSTANCE_CRN_SECONDARY} \ + --from-literal IBM_COS_REGION_SECONDARY=${COS_REGION_SECONDARY} \ + --from-literal IBM_COS_ENDPOINT_SECONDARY=${COS_ENDPOINT_SECONDARY} \ + --from-literal PRIMARY_COS_BUCKET_NAME=${COS_BUCKET_NAME_PRIMARY} \ + --from-literal IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=${COS_INSTANCE_CRN_PRIMARY} \ + --from-literal IBM_COS_REGION_PRIMARY=${COS_REGION_PRIMARY} \ + --from-literal IBM_COS_ENDPOINT_PRIMARY=${COS_ENDPOINT_PRIMARY} \ + --from-literal BUCKET_TIMESTAMP_FILENAME=${BUCKET_TIMESTAMP_FILENAME} + +if [ $? -ne 0 ]; then + echo "Secret '${BASE_SECRET}' already exists." + read -p "Do you want to override the existing secret? (y/n): " confirm + if [[ "$confirm" =~ ^[Yy]$ ]]; then + echo "Updating secret ${BASE_SECRET}..." 
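+        # Re-issue the same key/value literals via 'secret update' so the existing
+        # secret is overwritten in place.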
+ ibmcloud ce secret update --name ${BASE_SECRET} \ + --from-literal SECONDARY_COS_BUCKET_NAME=${COS_BUCKET_NAME_SECONDARY} \ + --from-literal IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=${COS_INSTANCE_CRN_SECONDARY} \ + --from-literal IBM_COS_REGION_SECONDARY=${COS_REGION_SECONDARY} \ + --from-literal IBM_COS_ENDPOINT_SECONDARY=${COS_ENDPOINT_SECONDARY} \ + --from-literal PRIMARY_COS_BUCKET_NAME=${COS_BUCKET_NAME_PRIMARY} \ + --from-literal IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=${COS_INSTANCE_CRN_PRIMARY} \ + --from-literal IBM_COS_REGION_PRIMARY=${COS_REGION_PRIMARY} \ + --from-literal IBM_COS_ENDPOINT_PRIMARY=${COS_ENDPOINT_PRIMARY} \ + --from-literal BUCKET_TIMESTAMP_FILENAME=${BUCKET_TIMESTAMP_FILENAME} + + if [ $? -eq 0 ]; then + echo -e "${GREEN}SUCCESS${NC}: Step 6.1: Base secret update complete." + else + echo -e "${RED}ERROR${NC}: Failed to update secret." + exit 1 + fi + else + echo "Secret update cancelled by user. Exiting..." + exit 0 + fi +else + echo -e "${GREEN}SUCCESS${NC}: Step 6.1: Base secret creation complete." +fi + +# Creating a container registry secret +echo "---" +echo "Step 6.2: Creating Container Registry Secret: $CONTAINER_REGISTRY_SECRET" +echo "Step 6.2.1: Checking if API key '${API_KEY_NAME}' exists" +API_KEY=$(ibmcloud iam api-keys --output json | jq -r ".[] | select(.name == \"$API_KEY_NAME\") | .apikey") + +if [ -z "$API_KEY" ]; then + echo "API key '${API_KEY_NAME}' does not exist. Creating a new one..." + API_KEY=$(ibmcloud iam api-key-create ${API_KEY_NAME} -d "API Key for IBM CR COS-to-COS" -o JSON | jq -r '.apikey' ) + # ibmcloud iam api-key-create ${API_KEY_NAME} -d "API Key for IBM CR COS-to-COS" --file key_file + echo "API key '${API_KEY_NAME}' created." +else + echo "API key '${API_KEY_NAME}' already exists. Skipping creation." +fi + +ibmcloud ce secret create --name ${CONTAINER_REGISTRY_SECRET} --format registry --password ${API_KEY} --server ${REGISTRY_SERVER} +if [ $? -ne 0 ]; then + echo "Secret '${CONTAINER_REGISTRY_SECRET}' already exists." + read -p "Do you want to override the existing container registry secret? (y/n): " confirm + if [[ "$confirm" =~ ^[Yy]$ ]]; then + echo "Updating secret ${CONTAINER_REGISTRY_SECRET}..." + ibmcloud ce secret update --name ${CONTAINER_REGISTRY_SECRET} --password ${API_KEY} --server ${REGISTRY_SERVER} + + if [ $? -eq 0 ]; then + echo -e "${GREEN}SUCCESS${NC}: Step 6.2: Container Registry secret update complete." + else + echo -e "${RED}ERROR${NC}: Failed to update container registry secret." + exit 1 + fi + else + echo "Container registry secret update cancelled by user. Exiting..." + exit 0 + fi +else + echo -e "${GREEN}SUCCESS${NC}: Step 6.2: Container Registry secret creation complete." +fi +# Auth secrets +echo "---" +echo "Step 6.3: Creating Auth Secret: $AUTH_SECRET" +ibmcloud ce secret create --name ${AUTH_SECRET} \ + --from-literal IBM_COS_CRTokenFilePath_PRIMARY=${IBM_COS_CRTokenFilePath_PRIMARY} \ + --from-literal IBM_COS_CRTokenFilePath_SECONDARY=${IBM_COS_CRTokenFilePath_SECONDARY} \ + --from-literal IBM_COS_TRUSTED_PROFILE_ID_PRIMARY=${COS_TRUSTED_PROFILE_ID_PRIMARY} \ + --from-literal IBM_COS_TRUSTED_PROFILE_ID_SECONDARY=${COS_TRUSTED_PROFILE_ID_SECONDARY} +if [ $? -ne 0 ]; then + echo "Secret '${AUTH_SECRET}' already exists." + read -p "Do you want to override the existing auth secret? (y/n): " confirm + if [[ "$confirm" =~ ^[Yy]$ ]]; then + echo "Updating secret ${AUTH_SECRET}..." 
+ ibmcloud ce secret update --name ${AUTH_SECRET} \ + --from-literal IBM_COS_CRTokenFilePath_PRIMARY=${IBM_COS_CRTokenFilePath_PRIMARY} \ + --from-literal IBM_COS_CRTokenFilePath_SECONDARY=${IBM_COS_CRTokenFilePath_SECONDARY} \ + --from-literal IBM_COS_TRUSTED_PROFILE_ID_PRIMARY=${COS_TRUSTED_PROFILE_ID_PRIMARY} \ + --from-literal IBM_COS_TRUSTED_PROFILE_ID_SECONDARY=${COS_TRUSTED_PROFILE_ID_SECONDARY} + + if [ $? -eq 0 ]; then + echo -e "${GREEN}SUCCESS${NC}: Step 6.3: Auth secret update complete." + else + echo -e "${RED}ERROR${NC}: Failed to update auth secret." + exit 1 + fi + else + echo "Auth secret update cancelled by user. Exiting..." + exit 0 + fi +else + echo -e "${GREEN}SUCCESS${NC}: Step 6.3: Auth secret creation complete." +fi +# Create a job +# Create the job with environment variables, including the bucket's region +echo "-----" +echo "Step 7: Creating JOB with name $JOB_NAME" +ibmcloud ce job create --name ${JOB_NAME} --image "${JOB_IMAGE}" \ + --registry-secret ${CONTAINER_REGISTRY_SECRET} \ + --env-from-secret ${BASE_SECRET} \ + --env-from-secret ${AUTH_SECRET} \ + --argument true 2>/dev/null +if [ $? -ne 0 ]; then + # echo "Job '${JOB_NAME}' already exists. Exiting" + # exit 1 + + echo "Job '${JOB_NAME}' already exists. Updating Job." + ibmcloud ce job update --name ${JOB_NAME} --image "${JOB_IMAGE}" \ + --registry-secret ${CONTAINER_REGISTRY_SECRET} \ + --env-from-secret ${BASE_SECRET} \ + --env-from-secret ${AUTH_SECRET} \ + --argument true 2>/dev/null +fi +if [ $? -eq 0 ]; then +echo -e "${GREEN}SUCCESS${NC}Step 7: Job Created" +fi + +# Create a link to a compute resource for a trusted profile +echo "-----" +echo "Step 8.1: Linking JOB To Trusted Profile" +ibmcloud iam trusted-profile-link-create ${TRUSTED_PROFILE_NAME} \ + --name ce-job-${JOB_NAME} \ + --cr-type CE \ + --link-crn ${PROJECT_CRN} \ + --link-component-type job \ + --link-component-name "${JOB_NAME}" 2>/dev/null + + +# Add IAM policies for bucket access - Grant access to the bucket the job needs +echo "Step 8.2: Linking Primary COS To Trusted Profile" +ibmcloud iam trusted-profile-policy-create ${TRUSTED_PROFILE_NAME} \ +--roles "Writer" \ +--service-name cloud-object-storage \ +--service-instance ${COS_INSTANCE_CRN_PRIMARY} 2>/dev/null +# echo "***** DONE: Linking Primary COS To Trusted Profile" + +echo "Step 8.3: Linking Secondary COS To Trusted Profile" +ibmcloud iam trusted-profile-policy-create ${TRUSTED_PROFILE_NAME} \ +--roles "Writer" \ +--service-name cloud-object-storage \ +--service-instance ${COS_INSTANCE_CRN_SECONDARY} 2>/dev/null +# echo "***** DONE: Linking Secondary COS To Trusted Profile" + +echo "Step 8.4: Compute Resource Token" +curl \ + --request PATCH "https://api.${PROJECT_REGION}.codeengine.cloud.ibm.com/v2/projects/$(ibmcloud ce project current --output json | jq -r .guid)/jobs/${JOB_NAME}" \ + --header 'Accept: application/json' \ + --header "Authorization: $(ibmcloud iam oauth-tokens --output json | jq -r '.iam_token')" \ + --header 'Content-Type: application/merge-patch+json' \ + --header 'If-Match: *' \ + --data-raw "{ + \"run_compute_resource_token_enabled\": true + }" 2>/dev/null +# echo "******* DONE: Compute Resource Token *******" + +# echo "-----" +# echo "Step 9" +# # Submit the job +# echo "LIVE !" 
+# echo " ********** Submitting the job and Logs *********" +# RANDOM_CODE=$(printf "%06d" $((RANDOM % 1000000))) +# ibmcloud ce jobrun submit --job ${JOB_NAME} --name ${JOB_NAME}-${RANDOM_CODE} +# ibmcloud ce jobrun logs --name ${JOB_NAME}-${RANDOM_CODE} --follow diff --git a/code-engine-cos2cos/data.sh b/code-engine-cos2cos/data.sh new file mode 100644 index 00000000..26348190 --- /dev/null +++ b/code-engine-cos2cos/data.sh @@ -0,0 +1,57 @@ +#!/bin/sh +# This can be modified as per the User preference. + +# Image details +CR_REGION=eu-es +RESOURCE_GROUP=demo-rg +REGISTRY=private.es.icr.io +REGISTRY_NAMESPACE=dummy +IMAGE_NAME=dev-cos2cos1 +PROJECT_NAME="CE Internship" + +# Regions +COS_REGION_PRIMARY=au-syd +COS_REGION_SECONDARY=eu-es +PROJECT_REGION=eu-es +PROJECT_RESOURCE_GROUP=demo-rg +PROJECT_NAME="Cos2Cos" + +# Resource groups +COS_RESOURCE_GROUP_PRIMARY=demo-rg-primary +COS_RESOURCE_GROUP_SECONDARY=demo-rg-secondary + +#Creating a COS Instance +COS_INSTANCE_NAME_PRIMARY=cos-instance-one +COS_INSTANCE_NAME_SECONDARY=cos-instance-two + +# Bucket names +COS_BUCKET_NAME_PRIMARY=cos-bucket-one +COS_BUCKET_NAME_SECONDARY=cos-bucket-two + +# Endpoints +COS_ENDPOINT_PRIMARY=s3.au-syd.cloud-object-storage.appdomain.cloud +COS_ENDPOINT_SECONDARY=s3.eu-es.cloud-object-storage.appdomain.cloud +# Timestamp +BUCKET_TIMESTAMP_FILENAME=last_modified_time.json + +# CRTokenFilePath +IBM_COS_CRTokenFilePath_PRIMARY=/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token +IBM_COS_CRTokenFilePath_SECONDARY=/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token + +# Trusted Profile +TRUSTED_PROFILE_NAME=demo-trusted-profile + +# Jobs +JOB_NAME=cos2cos-job +JOB_IMAGE=private.es.icr.io/cos2cos/dev-cos2cos + +# Define registry credentials +REGISTRY_SERVER=private.es.icr.io +BASE_SECRET=ce-secret +AUTH_SECRET=auth-secret +CONTAINER_REGISTRY_SECRET=container-registry-secret + +API_KEY_NAME=api-key + +# Array Size of Job run Instance +ARRAY_SIZE=13 \ No newline at end of file diff --git a/code-engine-cos2cos/env_sample b/code-engine-cos2cos/env_sample new file mode 100644 index 00000000..36cbfd07 --- /dev/null +++ b/code-engine-cos2cos/env_sample @@ -0,0 +1,33 @@ +IBM_COS_API_KEY_PRIMARY=<> +IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=<> +IBM_COS_ENDPOINT_PRIMARY=<> +IBM_COS_REGION_PRIMARY= <> +PRIMARY_COS_BUCKET_NAME=<> + +IBM_COS_API_KEY_SECONDARY=<> +IBM_COS_ENDPOINT_SECONDARY=<> +IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=<> +IBM_COS_REGION_SECONDARY=<> +SECONDARY_COS_BUCKET_NAME=<> + +BUCKET_TIMESTAMP_FILENAME=temp/last_modified_time.json + +# Primary Bucket Trusted Profile Credentials +IBM_COS_TRUSTED_PROFILE_ID_PRIMARY= <> +IBM_COS_CRTokenFilePath_PRIMARY=/var/run/secrets/tokens/service-account-token +IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=<> +IBM_COS_ENDPOINT_PRIMARY=<> +IBM_COS_REGION_PRIMARY=<> + +# Secondary Bucket Trusted Profile Credentials +IBM_COS_TRUSTED_PROFILE_ID_SECONDARY= <> +IBM_COS_CRTokenFilePath_SECONDARY=/var/run/secrets/tokens/service-account-token +IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=<> +IBM_COS_REGION_SECONDARY=<> + +# Bucket Names +# PRIMARY_COS_BUCKET_NAME=<> +# SECONDARY_COS_BUCKET_NAME=<> + +# IBM Auth Endpoint (Typically you don't need to change this) +IBM_AUTH_ENDPOINT=https://iam.cloud.ibm.com/identity/token diff --git a/code-engine-cos2cos/go.mod b/code-engine-cos2cos/go.mod new file mode 100644 index 00000000..7afa0abf --- /dev/null +++ b/code-engine-cos2cos/go.mod @@ -0,0 +1,49 @@ +module ibm.com/codeengine/cos2cos + +go 1.23.5 + +require ( + 
github.com/IBM/ibm-cos-sdk-go v1.12.1 + github.com/joho/godotenv v1.5.1 + github.com/onsi/ginkgo v1.16.5 + github.com/onsi/ginkgo/v2 v2.23.0 + github.com/onsi/gomega v1.36.2 + github.com/stretchr/testify v1.10.0 +) + +require ( + github.com/IBM/go-sdk-core/v5 v5.18.5 // indirect + github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-openapi/errors v0.21.0 // indirect + github.com/go-openapi/strfmt v0.22.1 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.19.0 // indirect + github.com/go-task/slim-sprig/v3 v3.0.0 // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/nxadm/tail v1.4.8 // indirect + github.com/oklog/ulid v1.3.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect + go.mongodb.org/mongo-driver v1.14.0 // indirect + golang.org/x/crypto v0.36.0 // indirect + golang.org/x/net v0.37.0 // indirect + golang.org/x/sys v0.31.0 // indirect + golang.org/x/text v0.23.0 // indirect + golang.org/x/tools v0.31.0 // indirect + google.golang.org/protobuf v1.36.5 // indirect + gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/code-engine-cos2cos/go.sum b/code-engine-cos2cos/go.sum new file mode 100644 index 00000000..1245e5fc --- /dev/null +++ b/code-engine-cos2cos/go.sum @@ -0,0 +1,164 @@ +github.com/IBM/go-sdk-core/v5 v5.18.5 h1:g0JRl3sYXJczB/yuDlrN6x22LJ6jIxhp0Sa4ARNW60c= +github.com/IBM/go-sdk-core/v5 v5.18.5/go.mod h1:KonTFRR+8ZSgw5cxBSYo6E4WZoY1+7n1kfHM82VcjFU= +github.com/IBM/ibm-cos-sdk-go v1.12.1 h1:pWs5c5/j9PNJE1lIQhYtzpdCxu2fpvCq9PHs6/nDjyI= +github.com/IBM/ibm-cos-sdk-go v1.12.1/go.mod h1:7vmUThyAq4+AD1eEyGZi90ir06Z9YhsEzLBsdGPfcqo= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= 
+github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= +github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-openapi/errors v0.21.0 h1:FhChC/duCnfoLj1gZ0BgaBmzhJC2SL/sJr8a2vAobSY= +github.com/go-openapi/errors v0.21.0/go.mod h1:jxNTMUxRCKj65yb/okJGEtahVd7uvWnuWfj53bse4ho= +github.com/go-openapi/strfmt v0.22.1 h1:5Ky8cybT4576C6Ffc+8gYji/wRXCo6Ozm8RaWjPI6jc= +github.com/go-openapi/strfmt v0.22.1/go.mod h1:OfVoytIXJasDkkGvkb1Cceb3BPyMOwk1FgmyyEw7NYg= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= +github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 h1:+J3r2e8+RsmN3vKfo75g0YSY61ms37qzPglu4p0sGro= +github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= 
+github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/ginkgo/v2 v2.23.0 h1:FA1xjp8ieYDzlgS5ABTpdUDB7wtngggONc8a7ku2NqQ= +github.com/onsi/ginkgo/v2 v2.23.0/go.mod h1:zXTP6xIp3U8aVuXN8ENK9IXRaTjFnpVB9mGmaSRvxnM= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8= +github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= +go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= +golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.31.0 
h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= +golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= +google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/code-engine-cos2cos/main.go b/code-engine-cos2cos/main.go new file mode 100644 index 
00000000..3a6874d9 --- /dev/null +++ b/code-engine-cos2cos/main.go @@ -0,0 +1,110 @@ +package main + +import ( + "fmt" + "os" + + "github.com/IBM/ibm-cos-sdk-go/service/s3" + "github.com/joho/godotenv" + "ibm.com/codeengine/cos2cos/bucketOperations" + "ibm.com/codeengine/cos2cos/process" +) + +// Note: When running the program in local env, pass false, false as cmd-line arg. +// Arg1: isInCodeEngine <- to load the env file from local Os. +// Arg2: isUsingTrustedProfile <- Works only with kubernetes cluster. +func main() { + fmt.Println("Starting application with Job Index:", os.Getenv("JOB_INDEX")) + + // Default Values for running the program + // Use service credentials and + var isInCodeEngine bool = true + var isUsingTrustedProfile bool = true + + // First argument states if the program is running in code engine env. (Default: Yes) + if len(os.Args) > 1 { + isInCodeEngine = os.Args[1] == "true" + if !isInCodeEngine { + os.Setenv("JOB_ARRAY_SIZE", "1") + os.Setenv("JOB_INDEX", "0") + } + } + + // Second argument states if the program uses trustedProfile (Default: Yes) + if len(os.Args) > 2 { + isUsingTrustedProfile = os.Args[2] == "true" + } + + if !isInCodeEngine { + err := godotenv.Load() + if err != nil { + fmt.Println("Error loading env file") + return + } + } + + fmt.Println("Any errors while processing objects:", processingMain(isUsingTrustedProfile)) + + fmt.Println("Clsoing application") +} + +// Function will initiate the buckets, sessions and trigger the StartProcessing function for actual processing +// accepts a boolean argument which decides how to create COS-Client (Using TrustedProfile or Service Credentials) +func processingMain(useTrustedProfile bool) error { + // fmt.Println("hehe") + + var cosClient_PRIMARY, cosClient_SECONDARY *s3.S3 + + if !useTrustedProfile { + // Create a cosClient using Service Credentials + cosClient_PRIMARY = bucketOperations.NewCosClient(os.Getenv("IBM_COS_API_KEY_PRIMARY"), + os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY"), + os.Getenv("IBM_COS_ENDPOINT_PRIMARY"), + os.Getenv("IBM_COS_REGION_PRIMARY"), + ) + + cosClient_SECONDARY = bucketOperations.NewCosClient(os.Getenv("IBM_COS_API_KEY_SECONDARY"), + os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY"), + os.Getenv("IBM_COS_ENDPOINT_SECONDARY"), + os.Getenv("IBM_COS_REGION_SECONDARY"), + ) + } else { + // Create a cosClient using Trusted Profile + cosClient_PRIMARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID_PRIMARY"), + os.Getenv("IBM_COS_CRTokenFilePath_PRIMARY"), + os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY"), + os.Getenv("IBM_COS_ENDPOINT_PRIMARY"), + os.Getenv("IBM_COS_REGION_PRIMARY"), + ) + + cosClient_SECONDARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID_SECONDARY"), + os.Getenv("IBM_COS_CRTokenFilePath_SECONDARY"), + os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY"), + os.Getenv("IBM_COS_ENDPOINT_SECONDARY"), + os.Getenv("IBM_COS_REGION_SECONDARY"), + ) + } + + // Creating Primary bucket instance + primary_bucket := bucketOperations.NewBucket( + os.Getenv("PRIMARY_COS_BUCKET_NAME"), + cosClient_PRIMARY, + ) + + // Creating secondary bucket instance + secondary_bucket := bucketOperations.NewBucket( + os.Getenv("SECONDARY_COS_BUCKET_NAME"), + cosClient_SECONDARY, + ) + // fmt.Println("hehe") + // Initiate the process from primary bucket to secondary bucket + // err := process.StartProcessing(primary_bucket, secondary_bucket) + err := process.StartProcessingPagination(primary_bucket, 
secondary_bucket) + if err != nil { + return fmt.Errorf("\nBackup failed: %v", err) + } + + fmt.Println("\nBackup completed successfully.") + + return nil +} diff --git a/code-engine-cos2cos/main_test.go b/code-engine-cos2cos/main_test.go new file mode 100644 index 00000000..8fe0fdfc --- /dev/null +++ b/code-engine-cos2cos/main_test.go @@ -0,0 +1,13 @@ +package main + +import ( + "testing" + + "github.com/onsi/ginkgo" + "github.com/onsi/gomega" +) + +func TestMyApp(t *testing.T) { + gomega.RegisterFailHandler(ginkgo.Fail) + ginkgo.RunSpecs(t, "COS Suite") +} diff --git a/code-engine-cos2cos/metadata/local-timestamp.go b/code-engine-cos2cos/metadata/local-timestamp.go new file mode 100644 index 00000000..24179e2b --- /dev/null +++ b/code-engine-cos2cos/metadata/local-timestamp.go @@ -0,0 +1,49 @@ +package metadata + +// This package deals with storing, retriving and deleting timestamp in a local file/Environment. +/* +import ( + "encoding/json" + "log" + "os" +) + +const metadataFile = "metadata.json" + +type Metadata struct { + Objects map[string]string `json:"objects"` // ObjectKey -> LastModified +} + +// Load metadata from local file +func LoadMetadata() (Metadata, error) { + data := Metadata{Objects: make(map[string]string)} + file, err := os.ReadFile(metadataFile) + if err != nil { + if os.IsNotExist(err) { + return data, nil + } + return data, err + } + err = json.Unmarshal(file, &data) + return data, err +} + +// Save metadata to local file +func SaveMetadata(data Metadata) error { + fileData, err := json.MarshalIndent(data, "", " ") + if err != nil { + return err + } + return os.WriteFile(metadataFile, fileData, 0644) +} + +// Delete metadata file after processing +func DeleteMetadataFile() { + err := os.Remove(metadataFile) + if err != nil { + log.Println("Error deleting metadata file:", err) + } else { + log.Println("Metadata file deleted successfully.") + } +} +*/ diff --git a/code-engine-cos2cos/metadata/timestamp.go b/code-engine-cos2cos/metadata/timestamp.go new file mode 100644 index 00000000..844c25a7 --- /dev/null +++ b/code-engine-cos2cos/metadata/timestamp.go @@ -0,0 +1,102 @@ +package metadata + +import ( + "encoding/json" + "fmt" + "os" + "time" + + "ibm.com/codeengine/cos2cos/bucketOperations" + "ibm.com/codeengine/cos2cos/utils" +) + +type TimestampData struct { + Timestamp string `json:"timestamp"` +} + +// Function to get the last time of process/processing +// If no argument is passed, It will try to find the file locally +// Pass bucket as a argument to fetch from bucket +// Note: Function will not return error if file is not found +func GetLastProcessTime(buckets ...*bucketOperations.Bucket) time.Time { + + // Function to load the timestamp file + file, err := LoadTimestamp(buckets) + + // If err, meaning either I/O issue or no such file/object exist + // So return zero valued time + if err != nil { + return time.Time{} + } + + var data TimestampData + err = json.Unmarshal(file, &data) + + if err != nil { + return time.Time{} + } + + timestamp, err := time.Parse(time.RFC3339, data.Timestamp) + + if err != nil { + return time.Time{} + } + + return timestamp +} + +// Function to load timestamp file locally or using a object in Bucket +func LoadTimestamp(buckets []*bucketOperations.Bucket) ([]byte, error) { + + filename := os.Getenv("BUCKET_TIMESTAMP_FILENAME") + if len(buckets) > 0 { + bucket := buckets[0] + res, err := bucket.GetObject(filename) + + if err != nil { + return nil, err + } + + return utils.ConvertObjectToByte(res) + } + // To load the timestamp 
metadata locally + file, err := os.ReadFile(filename) + + return file, err + + // TODO: Add functionality to get modified time directly from bucket object instead of file. (saves processing) +} + +// Function expects a timestamp as string in format "time.RFC3339" +func PutLastBackupTime(timestamp string, bucket ...*bucketOperations.Bucket) error { + filename := os.Getenv("BUCKET_TIMESTAMP_FILENAME") + + if timestamp == "" { + timestamp = time.Now().Format(time.RFC3339) + } + + data := TimestampData{ + Timestamp: timestamp, + } + + jsonData, err := json.Marshal(data) + + if err != nil { + return fmt.Errorf("error marshaling JSON: %w", err) + } + + return saveTimestamp(filename, jsonData, bucket) +} + +// Function to update/create the file having last time process occured. +func saveTimestamp(filename string, jsonData []byte, bucket []*bucketOperations.Bucket) error { + if len(bucket) > 0 { + b := bucket[0] + + return b.UploadBytesToBucket(filename, jsonData) + } + // To save as local file, will overwrite the existing file + return os.WriteFile(filename, jsonData, 0644) + + // TODO: save it as object in secondary bucket +} diff --git a/code-engine-cos2cos/metadata/timestamp_test.go b/code-engine-cos2cos/metadata/timestamp_test.go new file mode 100644 index 00000000..c47d6c08 --- /dev/null +++ b/code-engine-cos2cos/metadata/timestamp_test.go @@ -0,0 +1,218 @@ +package metadata_test + +import ( + "errors" + "fmt" + "strings" + "testing" + "time" + + "github.com/IBM/ibm-cos-sdk-go/aws" + "github.com/IBM/ibm-cos-sdk-go/service/s3" + "github.com/onsi/ginkgo" + "github.com/onsi/gomega" + "github.com/stretchr/testify/mock" + "ibm.com/codeengine/cos2cos/bucketOperations" + "ibm.com/codeengine/cos2cos/metadata" + mockS3 "ibm.com/codeengine/cos2cos/mock" +) + +func TestMetadata(t *testing.T) { + gomega.RegisterFailHandler(ginkgo.Fail) + ginkgo.RunSpecs(t, "Metadata Suite") +} + +var _ = ginkgo.Describe("LoadTimestamp", func() { + + var ( + mockS3Client *mockS3.MockS3Client + bucket *bucketOperations.Bucket + ) + ginkgo.BeforeEach(func() { + mockS3Client = new(mockS3.MockS3Client) + bucket = &bucketOperations.Bucket{ + Name: "test-bucket", + Client: mockS3Client, + } + }) + type LoadTimestampTestCase struct { + MockGetObjectResponse *s3.GetObjectOutput + MockGetObjectError error + MockReadFileResponse []byte + MockReadFileError error + ExpectedResult []byte + ExpectedError error + } + // Test cases + var testCases = []LoadTimestampTestCase{ + { + MockGetObjectResponse: &s3.GetObjectOutput{ + Body: aws.ReadSeekCloser(strings.NewReader("mocked-content")), + }, + MockGetObjectError: nil, + MockReadFileResponse: nil, + MockReadFileError: nil, + ExpectedResult: []byte("mocked-content"), + ExpectedError: nil, + }, + { + MockGetObjectResponse: &s3.GetObjectOutput{ + Body: aws.ReadSeekCloser(strings.NewReader("mocked-byte-data")), + }, + MockGetObjectError: nil, + MockReadFileResponse: nil, + MockReadFileError: nil, + ExpectedResult: []byte("mocked-byte-data"), + ExpectedError: nil, + }, + { + + MockGetObjectResponse: nil, + MockGetObjectError: errors.New("failed to get object"), + MockReadFileResponse: nil, + MockReadFileError: nil, + ExpectedResult: nil, + ExpectedError: errors.New("failed to get object"), + }, + } + + // Running the test cases + for _, tc := range testCases { + tc := tc + ginkgo.It("should handle the loadTimestamp correctly", func() { + // Mocking GetObject for the bucket + + mockS3Client.On("GetObject", mock.Anything).Return(tc.MockGetObjectResponse, tc.MockGetObjectError) + + // Calling 
the loadTimestamp function + data, err := metadata.LoadTimestamp([]*bucketOperations.Bucket{bucket}) + + // Asserting the result + gomega.Expect(data).To(gomega.Equal(tc.ExpectedResult)) + if tc.ExpectedError != nil { + gomega.Expect(err).To(gomega.MatchError(tc.ExpectedError)) + } else { + gomega.Expect(err).To(gomega.BeNil()) + } + + // Assert mock expectations + + mockS3Client.AssertExpectations(ginkgo.GinkgoT()) + }) + } +}) + +var _ = ginkgo.Describe("GetLastProcessTime", func() { + + var ( + mockS3Client *mockS3.MockS3Client + bucket *bucketOperations.Bucket + ) + ginkgo.BeforeEach(func() { + mockS3Client = new(mockS3.MockS3Client) + bucket = &bucketOperations.Bucket{ + Name: "test-bucket", + Client: mockS3Client, + } + }) + type GetLastProcessTimeTestCase struct { + MockGetObjectResponse *s3.GetObjectOutput + MockGetObjectError error + ExpectedResult time.Time + } + + // Test cases + var testCases = []GetLastProcessTimeTestCase{ + { + MockGetObjectResponse: &s3.GetObjectOutput{ + Body: aws.ReadSeekCloser(strings.NewReader(`{"timestamp":"2025-04-07T09:49:42Z"}`)), + }, + MockGetObjectError: nil, + ExpectedResult: parseTime("2025-04-07T09:49:42Z"), + }, + { + MockGetObjectResponse: &s3.GetObjectOutput{ + Body: nil, + }, + MockGetObjectError: errors.New("No such file found"), + ExpectedResult: time.Time{}, + }, + } + + // Running the test cases + for _, tc := range testCases { + tc := tc + ginkgo.It("should handle the GetLastProcessTime correctly", func() { + // Mocking GetObject for the bucket + + mockS3Client.On("GetObject", mock.Anything).Return(tc.MockGetObjectResponse, tc.MockGetObjectError) + + // Calling the loadTimestamp function + data := metadata.GetLastProcessTime(bucket) + + // Asserting the result + gomega.Expect(data).To(gomega.Equal(tc.ExpectedResult)) + + // Assert mock expectations + + mockS3Client.AssertExpectations(ginkgo.GinkgoT()) + }) + } +}) + +func parseTime(timeString string) time.Time { + res, _ := (time.Parse(time.RFC3339, timeString)) + return res +} + +var _ = ginkgo.Describe("PutLastBackupTime", func() { + var ( + mockS3Client *mockS3.MockS3Client + bucket *bucketOperations.Bucket + ) + ginkgo.BeforeEach(func() { + mockS3Client = new(mockS3.MockS3Client) + bucket = &bucketOperations.Bucket{ + Name: "test-bucket", + Client: mockS3Client, + } + }) + type PutLastProcessTimeTestCase struct { + InputTimeStamp string + MockPutObjectError error + ExpectedResult error + } + + // Test cases + var testCases = []PutLastProcessTimeTestCase{ + { + InputTimeStamp: "2025-04-07T09:49:42Z", + MockPutObjectError: nil, + ExpectedResult: nil, + }, + { + InputTimeStamp: "", + MockPutObjectError: nil, + ExpectedResult: nil, + }, + } + + for _, tc := range testCases { + tc := tc + ginkgo.It("should handle the PutLastProcessTime correctly", func() { + // Mocking GetObject for the bucket + + mockS3Client.On("PutObject", mock.Anything).Return(&s3.PutObjectOutput{}, tc.MockPutObjectError) + + // Calling the loadTimestamp function + err := metadata.PutLastBackupTime(tc.InputTimeStamp, bucket) + fmt.Println(err) + // Asserting the result + gomega.Expect(err).To(gomega.BeNil()) + + // Assert mock expectations + + mockS3Client.AssertExpectations(ginkgo.GinkgoT()) + }) + } +}) diff --git a/code-engine-cos2cos/mock/mockS3Client.go b/code-engine-cos2cos/mock/mockS3Client.go new file mode 100644 index 00000000..3eedf0f6 --- /dev/null +++ b/code-engine-cos2cos/mock/mockS3Client.go @@ -0,0 +1,72 @@ +package mockS3 + +import ( + "github.com/IBM/ibm-cos-sdk-go/service/s3" + 
"github.com/stretchr/testify/mock" +) + +// The mock client +type MockS3Client struct { + mock.Mock +} + +func (m *MockS3Client) HeadObject(input *s3.HeadObjectInput) (*s3.HeadObjectOutput, error) { + args := m.Called(input) + return args.Get(0).(*s3.HeadObjectOutput), args.Error(1) +} + +func (m *MockS3Client) ListBuckets(input *s3.ListBucketsInput) (*s3.ListBucketsOutput, error) { + args := m.Called(input) + return args.Get(0).(*s3.ListBucketsOutput), args.Error(1) +} + +func (m *MockS3Client) ListObjectsV2(input *s3.ListObjectsV2Input) (*s3.ListObjectsV2Output, error) { + args := m.Called(input) + return args.Get(0).(*s3.ListObjectsV2Output), args.Error(1) +} + +func (m *MockS3Client) GetObject(input *s3.GetObjectInput) (*s3.GetObjectOutput, error) { + args := m.Called(input) + return args.Get(0).(*s3.GetObjectOutput), args.Error(1) +} + +func (m *MockS3Client) PutObject(input *s3.PutObjectInput) (*s3.PutObjectOutput, error) { + args := m.Called(input) + return args.Get(0).(*s3.PutObjectOutput), args.Error(1) +} + +func (m *MockS3Client) GetObjectTagging(input *s3.GetObjectTaggingInput) (*s3.GetObjectTaggingOutput, error) { + args := m.Called(input) + return args.Get(0).(*s3.GetObjectTaggingOutput), args.Error(1) +} + +func (m *MockS3Client) PutObjectTagging(input *s3.PutObjectTaggingInput) (*s3.PutObjectTaggingOutput, error) { + args := m.Called(input) + return args.Get(0).(*s3.PutObjectTaggingOutput), args.Error(1) +} + +func (m *MockS3Client) DeleteObjectTagging(input *s3.DeleteObjectTaggingInput) (*s3.DeleteObjectTaggingOutput, error) { + args := m.Called(input) + return args.Get(0).(*s3.DeleteObjectTaggingOutput), args.Error(1) +} + +func (m *MockS3Client) DeleteObject(input *s3.DeleteObjectInput) (*s3.DeleteObjectOutput, error) { + args := m.Called(input) + return args.Get(0).(*s3.DeleteObjectOutput), args.Error(1) +} + +func (m *MockS3Client) ListObjectsV2Pages(input *s3.ListObjectsV2Input, fn func(*s3.ListObjectsV2Output, bool) bool) error { + args := m.Called(input) + + // Simulate paginated responses if needed + pages := args.Get(0).([]*s3.ListObjectsV2Output) + for i, page := range pages { + isLastPage := i == len(pages)-1 + cont := fn(page, isLastPage) + if !cont { + break + } + } + + return args.Error(1) +} diff --git a/code-engine-cos2cos/process/process.go b/code-engine-cos2cos/process/process.go new file mode 100644 index 00000000..4f22961f --- /dev/null +++ b/code-engine-cos2cos/process/process.go @@ -0,0 +1,229 @@ +package process + +import ( + "fmt" + "os" + "strconv" + "strings" + "time" + + "ibm.com/codeengine/cos2cos/bucketOperations" + "ibm.com/codeengine/cos2cos/metadata" + "ibm.com/codeengine/cos2cos/userDefinedProcess" + "ibm.com/codeengine/cos2cos/utils" +) + +// Function to List all the objects that are required for processing +func ListAllObjectsForProcessing(b *bucketOperations.Bucket, last_time_modified time.Time) ([]string, error) { + // Get all objects + objects, err := b.ListBucketObjects() + // objectsAllPages, err := b.ListBucketObjectsPagination() + + if err != nil { + return nil, fmt.Errorf("could not fetch bucket:%s objects\nError:%w", b.Name, err) + } + + array_size, _ := strconv.ParseInt(os.Getenv("JOB_ARRAY_SIZE"), 10, 32) + cur_job_index, _ := strconv.ParseInt(os.Getenv("JOB_INDEX"), 10, 32) + + requiredObjectsKey := make([]string, 0) + + // for _, object := range objects.Contents { + for _, object := range objects.Contents { + jobIndex := getObjectIndex(*object.Key, array_size) + // jobIndex := getObjectIndex(object, array_size) + if jobIndex 
== cur_job_index { + exists, err := b.CheckIfTagExists(*object.Key) + // exists, err := b.CheckIfTagExists(object) + if err != nil { + return nil, err + } + if bucketOperations.IsProcessingRequired(object, last_time_modified) && !exists { + requiredObjectsKey = append(requiredObjectsKey, *object.Key) + } + } + } + return requiredObjectsKey, nil +} + +func ListAllObjectsForProcessingPagination(b *bucketOperations.Bucket, last_time_modified time.Time) ([]string, error) { + // Get all objects + // objects, err := b.ListBucketObjects() + objectsAllPages, err := b.ListBucketObjectsPagination() + fmt.Print(objectsAllPages) + if err != nil { + return nil, fmt.Errorf("could not fetch bucket:%s objects\nError:%w", b.Name, err) + } + + array_size, _ := strconv.ParseInt(os.Getenv("JOB_ARRAY_SIZE"), 10, 32) + cur_job_index, _ := strconv.ParseInt(os.Getenv("JOB_INDEX"), 10, 32) + + requiredObjectsKey := make([]string, 0) + + // for _, object := range objects.Contents { + for _, object := range objectsAllPages.Contents { + jobIndex := getObjectIndex(*object.Key, array_size) + // jobIndex := getObjectIndex(object, array_size) + if jobIndex == cur_job_index { + exists, err := b.CheckIfTagExists(*object.Key) + // exists, err := b.CheckIfTagExists(object) + if err != nil { + return nil, err + } + if bucketOperations.IsProcessingRequired(object, last_time_modified) && !exists { + requiredObjectsKey = append(requiredObjectsKey, *object.Key) + } + } + } + return requiredObjectsKey, nil +} + +// The function returns the job index that will be used to process the object +// Based on Alphabetical Order. Should be modified by User as per use-case. +func getObjectIndex(object string, array_size int64) int64 { + object = strings.ToLower(object) + + num := int64(object[0] - 'a') + + return num % array_size + // chunk_size := math.Ceil((26 / array_size)) + + // return int(float64(num) / chunk_size) +} + +func StartProcessing(primary_bucket *bucketOperations.Bucket, secondary_bucket *bucketOperations.Bucket) error { + // Get the time of process which will be updated later on + newBackupTime := time.Now().Format(time.RFC3339) + + // Passing primary_bucket b to get the timestamp from secondary_bucket + last_time_modified := metadata.GetLastProcessTime(secondary_bucket) + + // Get the list of all the objects that are modified + keys, err := ListAllObjectsForProcessing(primary_bucket, last_time_modified) + + if err != nil { + return fmt.Errorf("error Fetching Objects for processing: %w", err) + } + fmt.Println("\n*** Objects that are to be processed:\n", keys) + // Channels for each process object that gives bool values as return type + errorChans := make([]chan error, 0) + + for _, key := range keys { + errorChan := make(chan error) + errorChans = append(errorChans, errorChan) + // Call the process function + // This function will fetch the object as well as save it back to another bucket + go processObject(key, primary_bucket, secondary_bucket, errorChan) + } + + //Wait for all the processObject calls + var isAllSuccess bool = true + for index := range len(errorChans) { + err := <-errorChans[index] + if err != nil { + fmt.Println("Error processing object:", err) + isAllSuccess = false + } + } + if !isAllSuccess { + return err + } + // Update the metadata + // Passing secondary_bucket to store the file in secondary_bucket + metadata.PutLastBackupTime(newBackupTime, secondary_bucket) + + return nil +} + +func StartProcessingPagination(primary_bucket *bucketOperations.Bucket, secondary_bucket *bucketOperations.Bucket) error 
{ + // Get the time of process which will be updated later on + newBackupTime := time.Now().Format(time.RFC3339) + + // Passing primary_bucket b to get the timestamp from secondary_bucket + last_time_modified := metadata.GetLastProcessTime(secondary_bucket) + + // Get the list of all the objects that are modified + keys, err := ListAllObjectsForProcessingPagination(primary_bucket, last_time_modified) + + if err != nil { + return fmt.Errorf("error Fetching Objects for processing: %w", err) + } + fmt.Println("\n*** Objects that are to be processed:\n", keys) + // Channels for each process object that gives bool values as return type + errorChans := make([]chan error, 0) + + for _, key := range keys { + errorChan := make(chan error) + errorChans = append(errorChans, errorChan) + // Call the process function + // This function will fetch the object as well as save it back to another bucket + go processObject(key, primary_bucket, secondary_bucket, errorChan) + } + + //Wait for all the processObject calls + var isAllSuccess bool = true + for index := range len(errorChans) { + err := <-errorChans[index] + if err != nil { + fmt.Println("Error processing object:", err) + isAllSuccess = false + } + } + if !isAllSuccess { + return err + } + // Update the metadata + // Passing secondary_bucket to store the file in secondary_bucket + metadata.PutLastBackupTime(newBackupTime, secondary_bucket) + + return nil +} + +func processObject(key string, primary_bucket *bucketOperations.Bucket, secondary_bucket *bucketOperations.Bucket, errorChan chan error) error { + // fmt.Println("The object:", key, " will be processed") + + // Adding a tag to it, value is the jobrun name + err := primary_bucket.AddTag(key, "isInProcessing", os.Getenv("CE_JOBRUN")) + if err != nil { + errorChan <- fmt.Errorf("failed to add tag to object: %w", err) + return err + } + + defer primary_bucket.DeleteTag(key, "isInProcessing") + + //Get the object + object, err := primary_bucket.GetObject(key) + if err != nil { + errorChan <- fmt.Errorf("failed to fetch object: %w", err) + return err + } + + // 1. Either Let user get the object and process it + // processedObject := userDefinedProcess(object) + // Put the object into another bucket + // bucket.UploadObjectToBucket(processedObject) + + // 2. 
Let the user get the bytes and process it + // Convert to Bytes + objectBytes, err := utils.ConvertObjectToByte(object) + if err != nil { + errorChan <- fmt.Errorf("error retriving the object for processing") + return fmt.Errorf("error retriving the object for processing") + } + + processedBytes, err := userDefinedProcess.UserDefinedProcessObjectBytes(objectBytes) + if err != nil { + errorChan <- fmt.Errorf("failed to process the object: %w", err) + return err + } + + err = secondary_bucket.UploadBytesToBucket(key, processedBytes) + if err != nil { + errorChan <- fmt.Errorf("failed to upload the object: %w", err) + return err + } + + errorChan <- nil + close(errorChan) + return nil +} diff --git a/code-engine-cos2cos/process/process_test.go b/code-engine-cos2cos/process/process_test.go new file mode 100644 index 00000000..e901e68c --- /dev/null +++ b/code-engine-cos2cos/process/process_test.go @@ -0,0 +1,365 @@ +package process + +import ( + "bytes" + "errors" + "fmt" + "io" + "os" + "strings" + "testing" + "time" + + "github.com/IBM/ibm-cos-sdk-go/aws" + "github.com/IBM/ibm-cos-sdk-go/service/s3" + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" + "github.com/stretchr/testify/mock" + + "ibm.com/codeengine/cos2cos/bucketOperations" + mockS3 "ibm.com/codeengine/cos2cos/mock" + "ibm.com/codeengine/cos2cos/utils" +) + +func TestBackup(t *testing.T) { + gomega.RegisterFailHandler(ginkgo.Fail) + ginkgo.RunSpecs(t, "Backup Suite") +} + +var _ = ginkgo.Describe("getObjectIndex", func() { + + getObjectIndexTestCases := []struct { + description string + ObjectKey string + array_size int64 + expectedResult int64 + }{ + { + description: "When object name start with a", + ObjectKey: "atestfile", + array_size: 26, + expectedResult: 0, + }, + { + description: "When object name start with a, array size 27", + ObjectKey: "atestfile", + array_size: 27, + expectedResult: 0, + }, + { + description: "When array size exceed 26", + ObjectKey: "atestfile", + array_size: 99, + expectedResult: 0, + }, + { + description: "When object name start with z, array size 26", + ObjectKey: "ztestfile", + array_size: 26, + expectedResult: 25, + }, + { + description: "When object name start with z, array size 13", + ObjectKey: "ztestfile", + array_size: 13, + expectedResult: 12, + }, + { + description: "When object name start with b, array size is 12", + ObjectKey: "btestfile", + array_size: 12, + expectedResult: 1, + }, + { + description: "When object name start with d, array size is 12", + ObjectKey: "dtestfile", + array_size: 12, + expectedResult: 3, + }, + { + description: "When object name start with z, array size is 12", + ObjectKey: "ztestfile", + array_size: 12, + expectedResult: 1, + }, + { + description: "When array size is 1", + ObjectKey: "rtestfile", + array_size: 1, + expectedResult: 0, + }, + { + description: "When array size is 2", + ObjectKey: "ttestfile", + array_size: 2, + expectedResult: 1, + }, + } + + for _, tc := range getObjectIndexTestCases { + + ginkgo.It(tc.description, func() { + res := getObjectIndex(tc.ObjectKey, tc.array_size) + + gomega.Expect(res).To(gomega.Equal(tc.expectedResult)) + }) + } +}) + +var _ = ginkgo.Describe("ListAllObjectsForProcessing", func() { + // Mimic the Code Engine Parallel Job Instance call to this function + os.Setenv("JOB_ARRAY_SIZE", "26") + + //Setup the mock bucket + var ( + mockClient *mockS3.MockS3Client + bucket *bucketOperations.Bucket + ) + + // Setup before each test + ginkgo.BeforeEach(func() { + mockClient = new(mockS3.MockS3Client) + bucket = 
&bucketOperations.Bucket{ + Name: "test-bucket", + Client: mockClient, // Inject mock client for testing + } + }) + testCases := []struct { + description string + mockOutput *s3.ListObjectsV2Output + last_time_modified time.Time + mockError error + expectedOutput []string + expectedError error + JOB_INDEX int + }{ + { + description: "2 files, job index 0, both need update", + last_time_modified: utils.ParseTime("2025-02-07T09:49:42Z"), + mockOutput: &s3.ListObjectsV2Output{ + Contents: []*s3.Object{ + {Key: aws.String("a_object1"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + {Key: aws.String("b_object2"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + }, + }, + mockError: nil, + JOB_INDEX: 0, + expectedOutput: []string{"a_object1"}, + expectedError: nil, + }, + { + description: "2 files, job index 1, both need update", + last_time_modified: utils.ParseTime("2025-02-07T09:49:42Z"), + mockOutput: &s3.ListObjectsV2Output{ + Contents: []*s3.Object{ + {Key: aws.String("a_object1"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + {Key: aws.String("b_object2"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + }, + }, + mockError: nil, + JOB_INDEX: 2, + expectedOutput: []string{}, + expectedError: nil, + }, + { + description: "2 files, no need update", + last_time_modified: utils.ParseTime("2025-04-07T09:49:42Z"), + mockOutput: &s3.ListObjectsV2Output{ + Contents: []*s3.Object{ + {Key: aws.String("a_object1"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + {Key: aws.String("b_object2"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + }, + }, + mockError: nil, + JOB_INDEX: 0, + expectedOutput: []string{}, + expectedError: nil, + }, + { + description: "2 files, both need update", + last_time_modified: utils.ParseTime("2025-02-07T09:49:42Z"), + mockOutput: &s3.ListObjectsV2Output{ + Contents: []*s3.Object{ + {Key: aws.String("a_object1"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + {Key: aws.String("a_object2"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + }, + }, + mockError: nil, + JOB_INDEX: 0, + expectedOutput: []string{"a_object1", "a_object2"}, + expectedError: nil, + }, + { + description: "no files", + last_time_modified: utils.ParseTime("2025-02-07T09:49:42Z"), + mockOutput: &s3.ListObjectsV2Output{ + Contents: []*s3.Object{}, + }, + mockError: errors.New("not fetch objects"), + JOB_INDEX: 0, + expectedOutput: []string{}, + expectedError: errors.New("not fetch objects"), + }, + } + + for _, tc := range testCases { + ginkgo.It(tc.description, func() { + os.Setenv("JOB_INDEX", fmt.Sprint(tc.JOB_INDEX)) + mockClient.On("ListObjectsV2", mock.Anything).Return(tc.mockOutput, tc.mockError) + mockClient.On("GetObjectTagging", mock.Anything).Return(&s3.GetObjectTaggingOutput{}, nil) + output, err := ListAllObjectsForProcessing(bucket, tc.last_time_modified) + fmt.Println(output) + if tc.expectedError != nil { + gomega.Expect(err).ToNot(gomega.BeNil()) + } else { + gomega.Expect(output).To(gomega.Equal(tc.expectedOutput)) + gomega.Expect(err).To(gomega.BeNil()) + } + }) + } + ginkgo.It("Tag Error", func() { + os.Setenv("JOB_INDEX", "0") + mockClient.On("ListObjectsV2", mock.Anything).Return(&s3.ListObjectsV2Output{ + Contents: []*s3.Object{ + {Key: aws.String("a_object1"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + {Key: 
aws.String("a_object2"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + }}, nil) + + mockClient.On("GetObjectTagging", mock.Anything).Return(&s3.GetObjectTaggingOutput{}, errors.New("Error getting tags")) + output, err := ListAllObjectsForProcessing(bucket, utils.ParseTime("2025-04-07T09:49:42Z")) + fmt.Println(output) + + gomega.Expect(err).ToNot(gomega.BeNil()) + + }) +}) + +var _ = ginkgo.Describe("processObject", func() { + var mockClient *mockS3.MockS3Client + var bucket *bucketOperations.Bucket + var errorChan chan error + + ginkgo.BeforeEach(func() { + mockClient = new(mockS3.MockS3Client) + bucket = &bucketOperations.Bucket{ + Name: "test-bucket", + Client: mockClient, + } + errorChan = make(chan error, 1) + }) + ginkgo.Context("should handle errors at various stages", func() { + + ginkgo.It("should handle AddTag errors", func() { + mockClient.On("PutObjectTagging", mock.Anything).Return(&s3.PutObjectTaggingOutput{}, errors.New("could not add tag")) + + processObject("dummy", bucket, bucket, errorChan) + + err := <-errorChan + + gomega.Expect(err).ToNot(gomega.BeNil()) + }) + + ginkgo.It("should handle GetObject errors", func() { + mockClient.On("PutObjectTagging", mock.Anything).Return(&s3.PutObjectTaggingOutput{}, nil) + mockClient.On("GetObject", mock.Anything).Return(&s3.GetObjectOutput{}, errors.New("could not get object")) + mockClient.On("DeleteObjectTagging", mock.Anything).Return(&s3.DeleteObjectTaggingOutput{}, nil) + processObject("dummy", bucket, bucket, errorChan) + + err := <-errorChan + + gomega.Expect(err).ToNot(gomega.BeNil()) + + }) + ginkgo.It("should handle PutObject errors", func() { + mockClient.On("PutObjectTagging", mock.Anything).Return(&s3.PutObjectTaggingOutput{}, nil) + mockClient.On("GetObject", mock.Anything).Return(&s3.GetObjectOutput{}, nil) + mockClient.On("PutObject", mock.Anything).Return(&s3.PutObjectOutput{}, errors.New("could not put object")) + mockClient.On("DeleteObjectTagging", mock.Anything).Return(&s3.DeleteObjectTaggingOutput{}, nil) + + processObject("dummy", bucket, bucket, errorChan) + + err := <-errorChan + + gomega.Expect(err).ToNot(gomega.BeNil()) + + }) + ginkgo.It("should handle DeleteTag errors", func() { + mockClient.On("PutObjectTagging", mock.Anything).Return(&s3.PutObjectTaggingOutput{}, nil) + mockClient.On("GetObject", mock.Anything).Return(&s3.GetObjectOutput{}, nil) + mockClient.On("PutObject", mock.Anything).Return(&s3.PutObjectOutput{}, nil) + + mockClient.On("DeleteObjectTagging", mock.Anything).Return(&s3.DeleteObjectTaggingOutput{}, errors.New("could not delete tag")) + + processObject("dummy", bucket, bucket, errorChan) + + err := <-errorChan + + gomega.Expect(err).ToNot(gomega.BeNil()) + + }) + }) + + ginkgo.Context("should process successfully", func() { + ginkgo.It("should handle have no errors", func() { + mockClient.On("PutObjectTagging", mock.Anything).Return(&s3.PutObjectTaggingOutput{}, nil) + + mockClient.On("GetObject", mock.Anything).Return(&s3.GetObjectOutput{ + Body: io.NopCloser(bytes.NewReader([]byte("Hello, World!"))), + }, nil) + + mockClient.On("PutObject", mock.Anything).Return(&s3.PutObjectOutput{}, nil) + + mockClient.On("DeleteObjectTagging", mock.Anything).Return(&s3.DeleteObjectTaggingOutput{}, nil) + + processObject("dummy", bucket, bucket, errorChan) + + err := <-errorChan + + gomega.Expect(err).To(gomega.BeNil()) + }) + }) +}) + +var _ = ginkgo.Describe("StartProcessing", func() { + var mockClient *mockS3.MockS3Client + var bucket *bucketOperations.Bucket + + 
ginkgo.BeforeEach(func() { + mockClient = new(mockS3.MockS3Client) + bucket = &bucketOperations.Bucket{ + Name: "test-bucket", + Client: mockClient, + } + }) + ginkgo.It("should have no errors", func() { + mockClient.On("GetObject", mock.Anything).Return(&s3.GetObjectOutput{ + Body: aws.ReadSeekCloser(strings.NewReader(`{"timestamp":"2025-02-07T09:49:42Z"}`)), + }, nil) + + mockClient.On("ListObjectsV2", mock.Anything).Return(&s3.ListObjectsV2Output{ + Contents: []*s3.Object{ + {Key: aws.String("a_object1"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + {Key: aws.String("b_object2"), LastModified: utils.TimePointer(utils.ParseTime("2025-03-07T09:49:42Z"))}, + }, + }, nil) + + mockClient.On("GetObjectTagging", mock.Anything).Return(&s3.GetObjectTaggingOutput{}, nil) + + mockClient.On("PutObjectTagging", mock.Anything).Return(&s3.PutObjectTaggingOutput{}, nil) + + mockClient.On("GetObject", mock.Anything).Return(&s3.GetObjectOutput{ + Body: io.NopCloser(bytes.NewReader([]byte("Hello, World!"))), + }, nil) + + mockClient.On("PutObject", mock.Anything).Return(&s3.PutObjectOutput{}, nil) + + mockClient.On("DeleteObjectTagging", mock.Anything).Return(&s3.DeleteObjectTaggingOutput{}, nil) + + err := StartProcessing(bucket, bucket) + + gomega.Expect(err).To(gomega.BeNil()) + }) + // ginkgo.Context("should handle errors", func() { + + // }) +}) diff --git a/code-engine-cos2cos/run.sh b/code-engine-cos2cos/run.sh new file mode 100755 index 00000000..34eada1d --- /dev/null +++ b/code-engine-cos2cos/run.sh @@ -0,0 +1,93 @@ +#!/bin/sh + +source data.sh + +alias ic=ibmcloud +ic target -g ${PROJECT_RESOURCE_GROUP} + +set -e + +# ======================================== +# === Stage 1: Upload data files to COS === +# ======================================== +echo -e "\n\n Stage 1: Uploading files to Primary COS Bucket..." + +UPLOAD_DIR="./input-data" + +# Make sure COS instance CRN is configured +ibmcloud cos config crn --crn "${COS_INSTANCE_CRN_PRIMARY}" --force + +for file in "$UPLOAD_DIR"/*; do + [ -f "$file" ] || continue # skip non-files + filename=$(basename "$file") + + echo "Uploading $filename..." + ibmcloud cos object-put \ + --bucket "${COS_BUCKET_NAME_PRIMARY}" \ + --key "$filename" \ + --region "${COS_REGION_PRIMARY}" \ + --body "$file" + + if [ $? -eq 0 ]; then + echo "Uploaded $filename" + else + echo "Failed to upload $filename" + fi +done + + +# ======================================== +# === Stage 2: Show uploaded files === +# ======================================== +echo -e "\n\n Stage 2: Listing uploaded files in Primary Bucket..." + +ibmcloud cos config crn --crn "${COS_INSTANCE_CRN_PRIMARY}" --force + +ibmcloud cos objects --bucket "${COS_BUCKET_NAME_PRIMARY}" --region "${COS_REGION_PRIMARY}" + +# ======================================== +# === Stage 3: Submit the Job === +# ======================================== +echo -e "\n\n Stage 3: Submitting the job..." + +RANDOM_CODE=$(printf "%06d" $((RANDOM % 1000000))) +JOB_RUN_NAME="${JOB_NAME}-${RANDOM_CODE}" + +ibmcloud ce jobrun submit --job "${JOB_NAME}" --name "${JOB_RUN_NAME}" --array-size ${ARRAY_SIZE} + +echo "Waiting for logs..." +sleep 5 # Give a few seconds for job to start before tailing logs + +ibmcloud ce jobrun logs --name "${JOB_RUN_NAME}" --follow + +# ======================================== +# === Stage 4: Check files in Secondary === +# ======================================== +echo -e "\n\n Stage 4: Checking processed files in Secondary Bucket..." 
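+# The job writes processed objects back under their original keys, so the listing
+# below should show the same object names as the primary bucket (with uppercased
+# content when the default user-defined process is used).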
+ +ibmcloud cos config crn --crn "${COS_INSTANCE_CRN_SECONDARY}" --force + +ibmcloud cos objects --bucket "${COS_BUCKET_NAME_SECONDARY}" --region "${COS_REGION_SECONDARY}" + +# ======================================== +# === Stage 5: Fetch one file from COS === +# ======================================== +echo -e "\n\n Stage 5: Fetching a processed file from Secondary Bucket..." + +# Example: Fetch `file.txt` back from secondary COS bucket +# Replace `file.txt` with actual processed object name + +# curl -X GET \ +# -H "Authorization: Bearer $(cat ${IBM_COS_CRTokenFilePath_SECONDARY})" \ +# "https://${COS_BUCKET_NAME_SECONDARY}.${COS_ENDPOINT_SECONDARY}/tuples.py" \ +# -o downloaded_file.txt +for file in "$UPLOAD_DIR"/*; do + [ -f "$file" ] || continue # skip non-files + filename=$(basename "$file") + ibmcloud cos object-get --bucket ${COS_BUCKET_NAME_SECONDARY} --key $filename "output-data/processed_$filename" + echo "Fetched file and saved as 'processed_$filename'" +done +# ======================================== +# === DONE === +# ======================================== +echo -e "\n\nAll stages complete!" diff --git a/code-engine-cos2cos/testFiles/generator.go b/code-engine-cos2cos/testFiles/generator.go new file mode 100644 index 00000000..d9692bdd --- /dev/null +++ b/code-engine-cos2cos/testFiles/generator.go @@ -0,0 +1,37 @@ +package main + +import ( + "fmt" + "log" + "os" + "strings" +) + +// This is a dummy testfile generator and has nothing to do with actual working of the code. +// It is for testing purpose +func generateScript() { + n := 2000 + for i := range n { + + c := rune(i%26) + 'a' + filename := fmt.Sprintf("%ctest%d.txt", c, i) + + tempFile, err := os.Create(fmt.Sprintf("testFiles/%s", filename)) + + if err != nil { + log.Fatal(err) + } + + var sb strings.Builder + for i := range 1 { + sb.WriteString(fmt.Sprintf("This is file %d which is going to be processed. Capitalize it !\n", i)) + } + // Read from response body + _, _ = tempFile.WriteString(sb.String()) + + } +} + +func main() { + generateScript() +} diff --git a/code-engine-cos2cos/userDefinedProcess/processObject.go b/code-engine-cos2cos/userDefinedProcess/processObject.go new file mode 100644 index 00000000..7ba08963 --- /dev/null +++ b/code-engine-cos2cos/userDefinedProcess/processObject.go @@ -0,0 +1,18 @@ +package userDefinedProcess + +import ( + "fmt" + "strings" + + "github.com/IBM/ibm-cos-sdk-go/service/s3" +) + +// Sample function. To be modified as per use. 
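+// The default byte-level implementation below simply uppercases the object's text;
+// it can be swapped for any other transformation (for example compression, format
+// conversion, or redaction) as long as the function signatures are preserved.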
+func UserDefinedProcessObject(object *s3.GetObjectOutput) *s3.GetObjectOutput { + fmt.Println(object) + return object +} + +func UserDefinedProcessObjectBytes(objectBytes []byte) ([]byte, error) { + return []byte(strings.ToUpper(string(objectBytes))), nil +} diff --git a/code-engine-cos2cos/utils/convert.go b/code-engine-cos2cos/utils/convert.go new file mode 100644 index 00000000..b0071bb4 --- /dev/null +++ b/code-engine-cos2cos/utils/convert.go @@ -0,0 +1,29 @@ +package utils + +import ( + "fmt" + "io" + "time" + + "github.com/IBM/ibm-cos-sdk-go/service/s3" +) + +func ConvertObjectToByte(result *s3.GetObjectOutput) ([]byte, error) { + if result == nil || result.Body == nil { + return nil, fmt.Errorf("invalid GetObjectOutput or Body is nil") + } + data, err := io.ReadAll(result.Body) + if err != nil { + return nil, fmt.Errorf("failed to read object data: %w", err) + } + + return data, nil +} + +func ParseTime(timeString string) time.Time { + res, _ := (time.Parse(time.RFC3339, timeString)) + return res +} +func TimePointer(t time.Time) *time.Time { + return &t +} diff --git a/code-engine-cos2cos/utils/convert_test.go b/code-engine-cos2cos/utils/convert_test.go new file mode 100644 index 00000000..e4cafc80 --- /dev/null +++ b/code-engine-cos2cos/utils/convert_test.go @@ -0,0 +1,73 @@ +package utils_test + +import ( + "bytes" + "fmt" + "io" + "testing" + + "github.com/IBM/ibm-cos-sdk-go/service/s3" + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" + "ibm.com/codeengine/cos2cos/utils" +) + +func TestUtils(t *testing.T) { + gomega.RegisterFailHandler(ginkgo.Fail) + ginkgo.RunSpecs(t, "Utils Suite") +} + +var _ = ginkgo.Describe("ConvertObjectToByte", func() { + + // Define test cases as a slice of structs + testCases := []struct { + name string + input *s3.GetObjectOutput + expected []byte + expectedErr bool + }{ + { + name: "When the input is valid", + input: &s3.GetObjectOutput{ + Body: io.NopCloser(bytes.NewReader([]byte("Hello, World!"))), + }, + expected: []byte("Hello, World!"), + expectedErr: false, + }, + { + name: "When the Body is nil", + input: &s3.GetObjectOutput{ + Body: nil, + }, + expected: nil, + expectedErr: true, + }, + { + name: "When the input is nil", + input: nil, + expected: nil, + expectedErr: true, + }, + } + + // Loop through test cases and run each one + for _, tc := range testCases { + + ginkgo.Context(tc.name, func() { + ginkgo.It(fmt.Sprintf("should behave as expected for %s", tc.name), func() { + // Call the ConvertObjectToByte function + result, err := utils.ConvertObjectToByte(tc.input) + + // Assert that the error behavior matches the expected behavior + if tc.expectedErr { + gomega.Expect(err).To(gomega.HaveOccurred()) + } else { + gomega.Expect(err).To(gomega.BeNil()) + } + + // Assert that the result matches the expected byte slice + gomega.Expect(result).To(gomega.Equal(tc.expected)) + }) + }) + } +}) From 31c8d5832ab9f32541b93e88b3713ef04fa1d5ee Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:10:59 +0530 Subject: [PATCH 02/19] Update readme Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/README.md | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/code-engine-cos2cos/README.md b/code-engine-cos2cos/README.md index bc3eb4e5..20ed4011 100644 --- a/code-engine-cos2cos/README.md +++ b/code-engine-cos2cos/README.md @@ -42,7 +42,7 @@ Before you begin, ensure you have the following: ### Setup & Configuration 1. 
**Clone the Repository**: ```bash - git clone https://github.ibm.com/ibmcloud-codeengine-internship/code-engine-cos2cos + git clone https://github.com/IBM/CodeEngine.git cd code-engine-cos2cos ``` 2. **Modify the Processing Function (Optional)**: @@ -133,19 +133,9 @@ go test -v You can build and push the container image using one of the following methods. -**Note**: If you are using the [build.sh](/build.sh), by-default the [Method-2](#2-build-using-source-code-local-source) is used. +**Note**: If you are using the [build.sh](/build.sh), by-default the [Method-1](#1-build-using-source-code-local-source) is used. -#### 1. Build Using Podman (Local Source) - -If you want to build the container image using a local `Dockerfile` with Podman, follow these steps: - -```bash -ibmcloud cr login -podman build -t ${REGISTRY}/${NAMESPACE}/${IMAGE_NAME} --platform linux/amd64 . -podman push ${REGISTRY}/${NAMESPACE}/${IMAGE_NAME} -``` - -#### 2. Build Using Source Code (Local Source) +#### 1. Build Using Source Code (Local Source) To build the image from local source code using IBM Cloud Code Engine: @@ -154,7 +144,7 @@ ibmcloud ce build create --name ${BUILD_NAME} --build-type local --image ${REGIS ibmcloud ce buildrun submit --build ${BUILD_NAME} --name ${BUILD_NAME}-build-run ``` -#### 3. Build Using Git-based Source +#### 2. Build Using Git-based Source To build the image using a Git repository: From 933b00f5cee4eb18b5aa9af5f8ebf13561d89b4b Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:13:09 +0530 Subject: [PATCH 03/19] Update data.sh with fewer variables Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/data.sh | 26 +++----------------------- 1 file changed, 3 insertions(+), 23 deletions(-) diff --git a/code-engine-cos2cos/data.sh b/code-engine-cos2cos/data.sh index 26348190..b76606f6 100644 --- a/code-engine-cos2cos/data.sh +++ b/code-engine-cos2cos/data.sh @@ -1,24 +1,15 @@ #!/bin/sh -# This can be modified as per the User preference. 
- -# Image details -CR_REGION=eu-es -RESOURCE_GROUP=demo-rg -REGISTRY=private.es.icr.io -REGISTRY_NAMESPACE=dummy -IMAGE_NAME=dev-cos2cos1 -PROJECT_NAME="CE Internship" # Regions COS_REGION_PRIMARY=au-syd COS_REGION_SECONDARY=eu-es PROJECT_REGION=eu-es -PROJECT_RESOURCE_GROUP=demo-rg PROJECT_NAME="Cos2Cos" # Resource groups -COS_RESOURCE_GROUP_PRIMARY=demo-rg-primary -COS_RESOURCE_GROUP_SECONDARY=demo-rg-secondary +PROJECT_RESOURCE_GROUP=resource-group +COS_RESOURCE_GROUP_PRIMARY=resource-group +COS_RESOURCE_GROUP_SECONDARY=resource-group #Creating a COS Instance COS_INSTANCE_NAME_PRIMARY=cos-instance-one @@ -31,27 +22,16 @@ COS_BUCKET_NAME_SECONDARY=cos-bucket-two # Endpoints COS_ENDPOINT_PRIMARY=s3.au-syd.cloud-object-storage.appdomain.cloud COS_ENDPOINT_SECONDARY=s3.eu-es.cloud-object-storage.appdomain.cloud -# Timestamp -BUCKET_TIMESTAMP_FILENAME=last_modified_time.json - -# CRTokenFilePath -IBM_COS_CRTokenFilePath_PRIMARY=/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token -IBM_COS_CRTokenFilePath_SECONDARY=/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token # Trusted Profile TRUSTED_PROFILE_NAME=demo-trusted-profile # Jobs JOB_NAME=cos2cos-job -JOB_IMAGE=private.es.icr.io/cos2cos/dev-cos2cos # Define registry credentials -REGISTRY_SERVER=private.es.icr.io BASE_SECRET=ce-secret AUTH_SECRET=auth-secret -CONTAINER_REGISTRY_SECRET=container-registry-secret - -API_KEY_NAME=api-key # Array Size of Job run Instance ARRAY_SIZE=13 \ No newline at end of file From 02dbc764e38946775193a0bbb4bfe557d661440c Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:15:14 +0530 Subject: [PATCH 04/19] Remove container registry secret creation Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/config.sh | 61 ++++++----------------------------- 1 file changed, 10 insertions(+), 51 deletions(-) diff --git a/code-engine-cos2cos/config.sh b/code-engine-cos2cos/config.sh index 17d16ed6..88a703ff 100755 --- a/code-engine-cos2cos/config.sh +++ b/code-engine-cos2cos/config.sh @@ -103,7 +103,7 @@ if [ $? -eq 0 ]; then fi echo "-----" -echo "Step 6: Creating Secrets (Base Secret, Auth Secret, Registry Secret)" +echo "Step 6: Creating Secrets (Base Secret, Auth Secret)" echo "---" echo "Step 6.1: Creating Base Secret: $BASE_SECRET" ibmcloud ce secret create --name ${BASE_SECRET} \ @@ -147,45 +147,9 @@ else echo -e "${GREEN}SUCCESS${NC}: Step 6.1: Base secret creation complete." fi -# Creating a container registry secret -echo "---" -echo "Step 6.2: Creating Container Registry Secret: $CONTAINER_REGISTRY_SECRET" -echo "Step 6.2.1: Checking if API key '${API_KEY_NAME}' exists" -API_KEY=$(ibmcloud iam api-keys --output json | jq -r ".[] | select(.name == \"$API_KEY_NAME\") | .apikey") - -if [ -z "$API_KEY" ]; then - echo "API key '${API_KEY_NAME}' does not exist. Creating a new one..." - API_KEY=$(ibmcloud iam api-key-create ${API_KEY_NAME} -d "API Key for IBM CR COS-to-COS" -o JSON | jq -r '.apikey' ) - # ibmcloud iam api-key-create ${API_KEY_NAME} -d "API Key for IBM CR COS-to-COS" --file key_file - echo "API key '${API_KEY_NAME}' created." -else - echo "API key '${API_KEY_NAME}' already exists. Skipping creation." -fi - -ibmcloud ce secret create --name ${CONTAINER_REGISTRY_SECRET} --format registry --password ${API_KEY} --server ${REGISTRY_SERVER} -if [ $? -ne 0 ]; then - echo "Secret '${CONTAINER_REGISTRY_SECRET}' already exists." - read -p "Do you want to override the existing container registry secret? 
(y/n): " confirm - if [[ "$confirm" =~ ^[Yy]$ ]]; then - echo "Updating secret ${CONTAINER_REGISTRY_SECRET}..." - ibmcloud ce secret update --name ${CONTAINER_REGISTRY_SECRET} --password ${API_KEY} --server ${REGISTRY_SERVER} - - if [ $? -eq 0 ]; then - echo -e "${GREEN}SUCCESS${NC}: Step 6.2: Container Registry secret update complete." - else - echo -e "${RED}ERROR${NC}: Failed to update container registry secret." - exit 1 - fi - else - echo "Container registry secret update cancelled by user. Exiting..." - exit 0 - fi -else - echo -e "${GREEN}SUCCESS${NC}: Step 6.2: Container Registry secret creation complete." -fi # Auth secrets echo "---" -echo "Step 6.3: Creating Auth Secret: $AUTH_SECRET" +echo "Step 6.2: Creating Auth Secret: $AUTH_SECRET" ibmcloud ce secret create --name ${AUTH_SECRET} \ --from-literal IBM_COS_CRTokenFilePath_PRIMARY=${IBM_COS_CRTokenFilePath_PRIMARY} \ --from-literal IBM_COS_CRTokenFilePath_SECONDARY=${IBM_COS_CRTokenFilePath_SECONDARY} \ @@ -213,27 +177,30 @@ if [ $? -ne 0 ]; then exit 0 fi else - echo -e "${GREEN}SUCCESS${NC}: Step 6.3: Auth secret creation complete." + echo -e "${GREEN}SUCCESS${NC}: Step 6.2: Auth secret creation complete." fi # Create a job # Create the job with environment variables, including the bucket's region echo "-----" echo "Step 7: Creating JOB with name $JOB_NAME" -ibmcloud ce job create --name ${JOB_NAME} --image "${JOB_IMAGE}" \ - --registry-secret ${CONTAINER_REGISTRY_SECRET} \ +ibmcloud ce job create --name ${JOB_NAME} \ + --src "." \ --env-from-secret ${BASE_SECRET} \ --env-from-secret ${AUTH_SECRET} \ - --argument true 2>/dev/null + --argument true 2>/dev/null \ + --wait + # --registry-secret ${CONTAINER_REGISTRY_SECRET} \ + if [ $? -ne 0 ]; then # echo "Job '${JOB_NAME}' already exists. Exiting" # exit 1 echo "Job '${JOB_NAME}' already exists. Updating Job." ibmcloud ce job update --name ${JOB_NAME} --image "${JOB_IMAGE}" \ - --registry-secret ${CONTAINER_REGISTRY_SECRET} \ --env-from-secret ${BASE_SECRET} \ --env-from-secret ${AUTH_SECRET} \ --argument true 2>/dev/null + # --registry-secret ${CONTAINER_REGISTRY_SECRET} \ fi if [ $? -eq 0 ]; then echo -e "${GREEN}SUCCESS${NC}Step 7: Job Created" @@ -277,11 +244,3 @@ curl \ }" 2>/dev/null # echo "******* DONE: Compute Resource Token *******" -# echo "-----" -# echo "Step 9" -# # Submit the job -# echo "LIVE !" 
-# echo " ********** Submitting the job and Logs *********" -# RANDOM_CODE=$(printf "%06d" $((RANDOM % 1000000))) -# ibmcloud ce jobrun submit --job ${JOB_NAME} --name ${JOB_NAME}-${RANDOM_CODE} -# ibmcloud ce jobrun logs --name ${JOB_NAME}-${RANDOM_CODE} --follow From ad798a5db28c6362fb96d1d3ce164af2df01e72b Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:15:28 +0530 Subject: [PATCH 05/19] Delete build.sh Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/build.sh | 10 ---------- 1 file changed, 10 deletions(-) delete mode 100755 code-engine-cos2cos/build.sh diff --git a/code-engine-cos2cos/build.sh b/code-engine-cos2cos/build.sh deleted file mode 100755 index 48c53232..00000000 --- a/code-engine-cos2cos/build.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -BUILD_NAME=example-script-build-cos2cos-${IMAGE_NAME} - -ibmcloud target -r ${CR_REGION} -g ${RESOURCE_GROUP} -ibmcloud ce project list -ibmcloud ce project select -n "${PROJECT_NAME}" - -ibmcloud ce build create --name ${BUILD_NAME} --build-type local --image ${REGISTRY}/${REGISTRY_NAMESPACE}/${IMAGE_NAME} -ibmcloud ce buildrun submit --build ${BUILD_NAME} --name ${BUILD_NAME}-build-run \ No newline at end of file From 45432209c620b713e2be2031823be7168d2014ec Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:16:01 +0530 Subject: [PATCH 06/19] Remove dead-code Signed-off-by: Hamza Bharmal --- .../bucketOperations/bucketOperations_test.go | 85 ----------------- .../bucketOperations/bucketOps.go | 94 ------------------- .../bucketOperations/objectOps.go | 61 +----------- .../bucketOperations/objectOps_test.go | 5 - .../metadata/local-timestamp.go | 49 ---------- code-engine-cos2cos/metadata/timestamp.go | 4 +- code-engine-cos2cos/process/process.go | 20 +--- code-engine-cos2cos/process/process_test.go | 3 - .../userDefinedProcess/processObject.go | 8 -- 9 files changed, 4 insertions(+), 325 deletions(-) delete mode 100644 code-engine-cos2cos/metadata/local-timestamp.go diff --git a/code-engine-cos2cos/bucketOperations/bucketOperations_test.go b/code-engine-cos2cos/bucketOperations/bucketOperations_test.go index 4568ba2e..3d329762 100644 --- a/code-engine-cos2cos/bucketOperations/bucketOperations_test.go +++ b/code-engine-cos2cos/bucketOperations/bucketOperations_test.go @@ -17,90 +17,6 @@ func TestBucketOps(t *testing.T) { ginkgo.RunSpecs(t, "BucketOperations Suite") } -var _ = ginkgo.Describe("ListAvailableBuckets", func() { - var ( - mockS3Client *MockS3Client - bucket *Bucket - ) - - // Before each test, initialize the mock and the Bucket struct - ginkgo.BeforeEach(func() { - mockS3Client = new(MockS3Client) - bucket = &Bucket{ - Name: "test-bucket", - Client: mockS3Client, - } - }) - - // Test case struct format - type TestCase struct { - name string - mockResponse *s3.ListBucketsOutput - mockError error - expectedOutput *s3.ListBucketsOutput - expectedError error - } - - // Define test cases - var testCases = []TestCase{ - { - name: "Successful ListBuckets", - mockResponse: &s3.ListBucketsOutput{ - Buckets: []*s3.Bucket{ - { - Name: aws.String("bucket1"), - }, - { - Name: aws.String("bucket2"), - }, - }, - }, - mockError: nil, - expectedOutput: &s3.ListBucketsOutput{ - Buckets: []*s3.Bucket{ - { - Name: aws.String("bucket1"), - }, - { - Name: aws.String("bucket2"), - }, - }, - }, - expectedError: nil, - }, - { - name: "Error Listing Buckets", - mockResponse: nil, - mockError: errors.New("failed to list buckets"), - expectedOutput: nil, - expectedError: errors.New("failed to list 
buckets"), - }, - } - - // Iterate over test cases - for _, testCase := range testCases { - ginkgo.It(testCase.name, func() { - // Arrange mock behavior - mockS3Client.On("ListBuckets", mock.Anything).Return(testCase.mockResponse, testCase.mockError) - - // Act - result, err := bucket.ListAvailableBuckets() - - // Assert the results - if testCase.expectedError != nil { - gomega.Expect(err).To(gomega.HaveOccurred()) - gomega.Expect(err.Error()).To(gomega.Equal(testCase.expectedError.Error())) - } else { - gomega.Expect(err).ToNot(gomega.HaveOccurred()) - gomega.Expect(result).To(gomega.Equal(testCase.expectedOutput)) - } - - // Assert that the mock was called as expected - mockS3Client.AssertExpectations(ginkgo.GinkgoT()) - }) - } -}) - var _ = ginkgo.Describe("ListBucketObjects", func() { var ( @@ -148,7 +64,6 @@ var _ = ginkgo.Describe("ListBucketObjects", func() { // Iterate through the test cases for _, tc := range testCases { - tc := tc // capture the range variable ginkgo.Context(tc.description, func() { ginkgo.It("should handle the response correctly", func() { // Setup mock expectation diff --git a/code-engine-cos2cos/bucketOperations/bucketOps.go b/code-engine-cos2cos/bucketOperations/bucketOps.go index bed7a0cd..255ce294 100644 --- a/code-engine-cos2cos/bucketOperations/bucketOps.go +++ b/code-engine-cos2cos/bucketOperations/bucketOps.go @@ -40,7 +40,6 @@ type CosSession struct { } func NewCosClient(apiKey, resourceInstanceID, serviceEndpoint, region string, authEndpoint ...string) *s3.S3 { - // fmt.Println("Creating cos Client") var authEndpointVal string if len(authEndpoint) == 0 { authEndpointVal = "https://iam.cloud.ibm.com/identity/token" @@ -120,35 +119,14 @@ func (c *CosSession) CreateIBMCOSSessionTrustedProfile() *s3.S3 { return client } - -// Function to list all the available buckets for a config. -func (b *Bucket) ListAvailableBuckets() (*s3.ListBucketsOutput, error) { - - // Create client - // client := CreateIBMCOSSession() - client := b.Client - // Call Function - d, err := client.ListBuckets(&s3.ListBucketsInput{}) - - if err != nil { - fmt.Println("Could not list the buckets") - return nil, err - } - return d, nil -} - func (b *Bucket) ListBucketObjects() (*s3.ListObjectsV2Output, error) { bucketName := b.Name - // fmt.Println("Listing Bucket Objects in Bucket:", bucketName) - // Create client - // client := CreateIBMCOSSession() client := b.Client // Call Function Input := &s3.ListObjectsV2Input{ Bucket: aws.String(bucketName), } - // fmt.Println("Input object created", Input) objectList, e := client.ListObjectsV2(Input) if e != nil { @@ -161,9 +139,6 @@ func (b *Bucket) ListBucketObjects() (*s3.ListObjectsV2Output, error) { func (b *Bucket) ListBucketObjectsPagination() (*s3.ListObjectsV2Output, error) { bucketName := b.Name - // fmt.Println("Listing Bucket Objects in Bucket:", bucketName) - // Create client - // client := CreateIBMCOSSession() client := b.Client // Call Function @@ -211,72 +186,3 @@ func (bucket *Bucket) UploadBytesToBucket(objectKey string, data []byte) error { return nil } - -// Function to upload a object to a bucket. - Not Tested. 
-/* -func (b *Bucket) UploadObjectToBucket(object *s3.GetObjectOutput) error { - client := b.Client - - var buf bytes.Buffer - _, err := buf.ReadFrom(object.Body) - if err != nil { - log.Fatalf("Unable to read object body: %v", err) - return fmt.Errorf("Unable to read object body: %v", err) - } - - _, err = client.PutObject(&s3.PutObjectInput{ - Bucket: aws.String(destinationBucket), - Key: aws.String(destinationKey), - Body: bytes.NewReader(buf.Bytes()), - }) - if err != nil { - log.Fatalf("Unable to upload object to destination bucket: %v", err) - return fmt.Errorf("Unable to upload object to destination bucket: %v", err) - } - return nil -} -*/ - -// Returns an error if the upload fails. - Unused -/* -func (b *Bucket) UploadFileToBucket(objectKey string, filePath string) error { - - // Create client - // client := CreateIBMCOSSession() - client := b.Client - bucketName := b.Name - - // Read object into byte - - file, err := os.Open(filePath) - - if err != nil { - log.Fatalf("Unable to open file: %v", err) - return err - } - - fileInfo, err := file.Stat() - if err != nil { - log.Fatalf("Unable to get file stats: %v", err) - return err - } - fileBytes := make([]byte, fileInfo.Size()) - _, err = file.Read(fileBytes) - - if err != nil { - log.Fatalf("Unable to read file: %v", err) - return err - } - - input := s3.PutObjectInput{ - Bucket: aws.String(bucketName), - Key: aws.String(objectKey), - Body: bytes.NewReader(fileBytes), - } - - // Call Function to upload (Put) an object - _, _ = client.PutObject(&input) - // fmt.Println(result) - return nil -} -*/ diff --git a/code-engine-cos2cos/bucketOperations/objectOps.go b/code-engine-cos2cos/bucketOperations/objectOps.go index 2c0ac64f..eaf4be69 100644 --- a/code-engine-cos2cos/bucketOperations/objectOps.go +++ b/code-engine-cos2cos/bucketOperations/objectOps.go @@ -35,7 +35,7 @@ func (b *Bucket) CheckIfTagExists(objectKey string) (bool, error) { }) if err != nil { - return false, fmt.Errorf("Error getting object tags: %v", err) + return false, fmt.Errorf("error getting object tags: %v", err) } tagKeyToCheck := "isInProcessing" @@ -43,7 +43,6 @@ func (b *Bucket) CheckIfTagExists(objectKey string) (bool, error) { // Iterate through the tags and check if the key exists for _, tag := range resp.TagSet { if *tag.Key == tagKeyToCheck { - // fmt.Println(tag) return true, nil } } @@ -52,7 +51,6 @@ func (b *Bucket) CheckIfTagExists(objectKey string) (bool, error) { func IsProcessingRequired(res *s3.Object, timestamp time.Time) bool { compareVal := CompareUpdateTime(res, timestamp) - // fmt.Printf("The time for last process is: %s\nObject last time:%s\n", timestamp.String(), res.LastModified.String()) // A negative value indicates that process is required. 
return compareVal > 0 } @@ -74,62 +72,6 @@ func (b *Bucket) GetObject(objectKey string) (*s3.GetObjectOutput, error) { return response, nil } -/* -Function to store the object in a file in local storage -func StoreObject(objectKey string, response *s3.GetObjectOutput) error { - - tempFile, err := os.Create(fmt.Sprintf("temp/%s", objectKey)) - - if err != nil { - log.Fatal(err) - } - - // Read from response body - _, err = tempFile.ReadFrom(response.Body) - - if err != nil { - return err - } - return nil -} -*/ - -/* -// Function to get the metadata of an object without getting the object itself - Unused -func (b *Bucket) GetObjectMetadata(objectKey string) (*s3.HeadObjectOutput, error) { - client := b.Client - bucketName := b.Name - - input := &s3.HeadObjectInput{ - Bucket: aws.String(bucketName), - Key: aws.String(objectKey), - } - - result, err := client.HeadObject(input) - if err != nil { - // log.Fatalf("Failed to retrieve metadata: %v", err) - return nil, err - } - - // fmt.Println("Metadata for object:", objectKey) - // fmt.Printf("Last Modified: %v\n", result.LastModified) - // fmt.Printf("Size: %d bytes\n", *result.ContentLength) - // fmt.Printf("Content-Type: %s\n", *result.ContentType) - // fmt.Printf("ETag: %s\n", *result.ETag) - - // Print custom metadata - if result.Metadata != nil { - fmt.Println("Custom Metadata:") - for key, value := range result.Metadata { - fmt.Printf("%s: %s\n", key, *value) - } - } - - return result, nil -} -*/ -// test - // Add a tag to the object (for example, a custom tag) // The tag is added as a key-value pair func (b *Bucket) AddTag(objectKey, tagKey, tagValue string) error { @@ -170,6 +112,5 @@ func (b *Bucket) DeleteTag(objectKey, tagKey string) error { if err != nil { return fmt.Errorf("failed to delete tag to object: %v", err) } - // fmt.Printf("Tag Deleted successfully from the object: %s\n", objectKey) return nil } diff --git a/code-engine-cos2cos/bucketOperations/objectOps_test.go b/code-engine-cos2cos/bucketOperations/objectOps_test.go index 673c8486..af0c5056 100644 --- a/code-engine-cos2cos/bucketOperations/objectOps_test.go +++ b/code-engine-cos2cos/bucketOperations/objectOps_test.go @@ -13,11 +13,6 @@ import ( mockS3 "ibm.com/codeengine/cos2cos/mock" ) -// func TestObjectOps(t *testing.T) { -// gomega.RegisterFailHandler(ginkgo.Fail) -// ginkgo.RunSpecs(t, "ObjectOperations Suite") -// } - var _ = ginkgo.Describe("CompareUpdateTime", func() { type testCase struct { name string diff --git a/code-engine-cos2cos/metadata/local-timestamp.go b/code-engine-cos2cos/metadata/local-timestamp.go deleted file mode 100644 index 24179e2b..00000000 --- a/code-engine-cos2cos/metadata/local-timestamp.go +++ /dev/null @@ -1,49 +0,0 @@ -package metadata - -// This package deals with storing, retriving and deleting timestamp in a local file/Environment. 
-/* -import ( - "encoding/json" - "log" - "os" -) - -const metadataFile = "metadata.json" - -type Metadata struct { - Objects map[string]string `json:"objects"` // ObjectKey -> LastModified -} - -// Load metadata from local file -func LoadMetadata() (Metadata, error) { - data := Metadata{Objects: make(map[string]string)} - file, err := os.ReadFile(metadataFile) - if err != nil { - if os.IsNotExist(err) { - return data, nil - } - return data, err - } - err = json.Unmarshal(file, &data) - return data, err -} - -// Save metadata to local file -func SaveMetadata(data Metadata) error { - fileData, err := json.MarshalIndent(data, "", " ") - if err != nil { - return err - } - return os.WriteFile(metadataFile, fileData, 0644) -} - -// Delete metadata file after processing -func DeleteMetadataFile() { - err := os.Remove(metadataFile) - if err != nil { - log.Println("Error deleting metadata file:", err) - } else { - log.Println("Metadata file deleted successfully.") - } -} -*/ diff --git a/code-engine-cos2cos/metadata/timestamp.go b/code-engine-cos2cos/metadata/timestamp.go index 844c25a7..cee4f1c8 100644 --- a/code-engine-cos2cos/metadata/timestamp.go +++ b/code-engine-cos2cos/metadata/timestamp.go @@ -49,6 +49,7 @@ func GetLastProcessTime(buckets ...*bucketOperations.Bucket) time.Time { func LoadTimestamp(buckets []*bucketOperations.Bucket) ([]byte, error) { filename := os.Getenv("BUCKET_TIMESTAMP_FILENAME") + if len(buckets) > 0 { bucket := buckets[0] res, err := bucket.GetObject(filename) @@ -65,6 +66,7 @@ func LoadTimestamp(buckets []*bucketOperations.Bucket) ([]byte, error) { return file, err // TODO: Add functionality to get modified time directly from bucket object instead of file. (saves processing) + // As of now there is no otpion to get a last_modified_timestamp of a bucket. } // Function expects a timestamp as string in format "time.RFC3339" @@ -97,6 +99,4 @@ func saveTimestamp(filename string, jsonData []byte, bucket []*bucketOperations. 
} // To save as local file, will overwrite the existing file return os.WriteFile(filename, jsonData, 0644) - - // TODO: save it as object in secondary bucket } diff --git a/code-engine-cos2cos/process/process.go b/code-engine-cos2cos/process/process.go index 4f22961f..ab99eb79 100644 --- a/code-engine-cos2cos/process/process.go +++ b/code-engine-cos2cos/process/process.go @@ -17,7 +17,6 @@ import ( func ListAllObjectsForProcessing(b *bucketOperations.Bucket, last_time_modified time.Time) ([]string, error) { // Get all objects objects, err := b.ListBucketObjects() - // objectsAllPages, err := b.ListBucketObjectsPagination() if err != nil { return nil, fmt.Errorf("could not fetch bucket:%s objects\nError:%w", b.Name, err) @@ -28,13 +27,10 @@ func ListAllObjectsForProcessing(b *bucketOperations.Bucket, last_time_modified requiredObjectsKey := make([]string, 0) - // for _, object := range objects.Contents { for _, object := range objects.Contents { jobIndex := getObjectIndex(*object.Key, array_size) - // jobIndex := getObjectIndex(object, array_size) if jobIndex == cur_job_index { exists, err := b.CheckIfTagExists(*object.Key) - // exists, err := b.CheckIfTagExists(object) if err != nil { return nil, err } @@ -48,7 +44,6 @@ func ListAllObjectsForProcessing(b *bucketOperations.Bucket, last_time_modified func ListAllObjectsForProcessingPagination(b *bucketOperations.Bucket, last_time_modified time.Time) ([]string, error) { // Get all objects - // objects, err := b.ListBucketObjects() objectsAllPages, err := b.ListBucketObjectsPagination() fmt.Print(objectsAllPages) if err != nil { @@ -60,13 +55,10 @@ func ListAllObjectsForProcessingPagination(b *bucketOperations.Bucket, last_time requiredObjectsKey := make([]string, 0) - // for _, object := range objects.Contents { for _, object := range objectsAllPages.Contents { jobIndex := getObjectIndex(*object.Key, array_size) - // jobIndex := getObjectIndex(object, array_size) if jobIndex == cur_job_index { exists, err := b.CheckIfTagExists(*object.Key) - // exists, err := b.CheckIfTagExists(object) if err != nil { return nil, err } @@ -86,9 +78,6 @@ func getObjectIndex(object string, array_size int64) int64 { num := int64(object[0] - 'a') return num % array_size - // chunk_size := math.Ceil((26 / array_size)) - - // return int(float64(num) / chunk_size) } func StartProcessing(primary_bucket *bucketOperations.Bucket, secondary_bucket *bucketOperations.Bucket) error { @@ -180,8 +169,6 @@ func StartProcessingPagination(primary_bucket *bucketOperations.Bucket, secondar } func processObject(key string, primary_bucket *bucketOperations.Bucket, secondary_bucket *bucketOperations.Bucket, errorChan chan error) error { - // fmt.Println("The object:", key, " will be processed") - // Adding a tag to it, value is the jobrun name err := primary_bucket.AddTag(key, "isInProcessing", os.Getenv("CE_JOBRUN")) if err != nil { @@ -198,12 +185,7 @@ func processObject(key string, primary_bucket *bucketOperations.Bucket, secondar return err } - // 1. Either Let user get the object and process it - // processedObject := userDefinedProcess(object) - // Put the object into another bucket - // bucket.UploadObjectToBucket(processedObject) - - // 2. Let the user get the bytes and process it + // 1. 
Let the user get the bytes and process it // Convert to Bytes objectBytes, err := utils.ConvertObjectToByte(object) if err != nil { diff --git a/code-engine-cos2cos/process/process_test.go b/code-engine-cos2cos/process/process_test.go index e901e68c..b9690925 100644 --- a/code-engine-cos2cos/process/process_test.go +++ b/code-engine-cos2cos/process/process_test.go @@ -359,7 +359,4 @@ var _ = ginkgo.Describe("StartProcessing", func() { gomega.Expect(err).To(gomega.BeNil()) }) - // ginkgo.Context("should handle errors", func() { - - // }) }) diff --git a/code-engine-cos2cos/userDefinedProcess/processObject.go b/code-engine-cos2cos/userDefinedProcess/processObject.go index 7ba08963..d4ed360c 100644 --- a/code-engine-cos2cos/userDefinedProcess/processObject.go +++ b/code-engine-cos2cos/userDefinedProcess/processObject.go @@ -1,18 +1,10 @@ package userDefinedProcess import ( - "fmt" "strings" - - "github.com/IBM/ibm-cos-sdk-go/service/s3" ) // Sample function. To be modified as per use. -func UserDefinedProcessObject(object *s3.GetObjectOutput) *s3.GetObjectOutput { - fmt.Println(object) - return object -} - func UserDefinedProcessObjectBytes(objectBytes []byte) ([]byte, error) { return []byte(strings.ToUpper(string(objectBytes))), nil } From b6e795f71ed931ca55449f1844972e02d46a98d6 Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:16:10 +0530 Subject: [PATCH 07/19] Update readme Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/README.md | 83 +++-------------------------------- 1 file changed, 6 insertions(+), 77 deletions(-) diff --git a/code-engine-cos2cos/README.md b/code-engine-cos2cos/README.md index 20ed4011..6aae6482 100644 --- a/code-engine-cos2cos/README.md +++ b/code-engine-cos2cos/README.md @@ -34,9 +34,9 @@ Before you begin, ensure you have the following: 1. ce 2. cos 3. iam -- **Podman** or **Docker**(Refer, [Build Steps](#build-steps)): For building the container. + - **IBM Cloud Object Storage (COS)**(Not required when using config.sh script): Ensure you have created two buckets (primary and secondary). Refer, [Getting started with IBM Cloud Object Storage](https://cloud.ibm.com/docs/cloud-object-storage?topic=cloud-object-storage-getting-started-cloud-object-storage). -- **IBM Code Engine**(Not required when using config.sh script): Set up and create a project on IBM Code Engine. Refer, [Getting started with IBM Cloud Code Engine](https://cloud.ibm.com/docs/codeengine?topic=codeengine-getting-started). +- **IBM Code Engine Project**(Not required when using config.sh script): Set up and create a project on IBM Code Engine. Refer, [Getting started with IBM Cloud Code Engine](https://cloud.ibm.com/docs/codeengine?topic=codeengine-getting-started). - **Go**: For modifying and testing the source code. ### Setup & Configuration @@ -56,11 +56,11 @@ Before you begin, ensure you have the following: ``` 4. **Update [data.sh](/data.sh)**: - Please update the variable values in the file as per your requirement. The details for creating the cos2cos job with config.sh and building image with build.sh is fetched from this file. + Please update the variable values in the file as per your requirement. The details for creating the cos2cos job with config.sh is fetched from this file. - Note:- The variable CRTokenFilePath should remain same as mentioned in the data.sh file. 5. **Build the Image from the source code**: - Refer [Build Steps](#build-steps) + + Image build is done automatically in the config.sh file from source. 6. 
**Setup the required IBM Cloud resources**: To automatically set up the required IBM Cloud resources, including COS buckets and secrets, simply run the provided `config.sh` script: @@ -77,36 +77,17 @@ Before you begin, ensure you have the following: Note: - The script will either take the existing project with the project name provided in data.sh or create a new project with the same name. - The script will create a trusted profile if it does not exists. - - The script will work only with authentication mechanism of Trusted Profile. In-case the user wants to use service-credentials then he/she needs to manually update the AUTH_SECRET and create the API_KEY for both COS Instance as well as add second command-line argument as "false". The first argument can be set to "true" or "false". See below for more. - - The two command line arguments are: - 1. The first argument "isInCodeEngine", a boolean value, is set to true by-default (meaning the job is running in IBM Cloud Code-Engine). It can be set to "false" when the user wants to run it locally. - 2. The second argument "isUsingTrustedProfile", a boolean value, is set to true by-default (meaning authentication mechanism for COS bucket is TrustedProfile). It can be set to "false" when the user wants to use service-credentials. Please make sure that the AUTH_SECRET is also updated to have the following (in-case using service-credentials): - ```bash - IBM_COS_API_KEY_PRIMARY= - IBM_COS_API_KEY_SECONDARY= - ``` - - IMP: TrustedProfile Authentication method won't work when running the program locally. 7. **Run the Program**: Once everything is configured, run it - - - Locally using - ```bash - go run . - ``` - OR - On IBM Cloud Code-Engine using CLI: ```bash ibmcloud ce jobrun submit --job ${JOB_NAME} --name ${JOB_NAME} ``` - This will: - Trigger the job in **IBM Code Engine** to process the files in the primary bucket. - Upload the processed files to the secondary bucket. - Note: - - If you are running the program locally, pass 2 command-line arguments "false" "false" while running main.go - - Also make sure that env file is configured accordingly. 8. **Check the Logs**: After the job completes, check the IBM Cloud UI or the logs to confirm the processing status. @@ -116,9 +97,6 @@ The system supports parallel execution using IBM Code Engine Job arrays. This en A sample version of the parallel job setup is included in the repository: https://github.ibm.com/Hamza/Sample-Running-Jobs-in-Parallel -## Environment Setup - -To run the project locally, you need to create a `.env` file in the root directory. You can refer to the [`env_sample`](/env_sample) file for the required environment variables and their format. ## Testing @@ -129,55 +107,6 @@ To run the tests: go test -v ``` -## Build Steps - -You can build and push the container image using one of the following methods. - -**Note**: If you are using the [build.sh](/build.sh), by-default the [Method-1](#1-build-using-source-code-local-source) is used. - -#### 1. Build Using Source Code (Local Source) - -To build the image from local source code using IBM Cloud Code Engine: - -```bash -ibmcloud ce build create --name ${BUILD_NAME} --build-type local --image ${REGISTRY}/${NAMESPACE}/${IMAGE_NAME} -ibmcloud ce buildrun submit --build ${BUILD_NAME} --name ${BUILD_NAME}-build-run -``` - -#### 2. Build Using Git-based Source - -To build the image using a Git repository: - -1. Create a deploy key or user-access key in your GitHub repository. -2. 
Add the private key by creating an SSH secret in IBM Cloud Code Engine. -3. Create a build using the Git repository: - -```bash -ibmcloud ce build create \ - --name ${BUILD_NAME} \ - --image ${REGISTRY}/${NAMESPACE}/${IMAGE_NAME} \ - --source ${GIT_SSH_URL} \ - --context-dir / \ - --strategy dockerfile \ - --git-repo-secret ${GIT_SSH_SECRET} -``` - -4. Submit the build: - -```bash -ibmcloud ce buildrun submit --build ${BUILD_NAME} -``` - -### View Build Logs - -To view the logs of a build run, use the following command: - -```bash -ibmcloud ce buildrun logs -f -n -``` - -Replace `` with the actual name of your build run. - ## Performance The program is optimized for handling large files (up to several GBs). For example, when tested with 50 files (each 65MB), the program processed the files in 70 to 100 seconds, with 13 parallel jobs. @@ -185,5 +114,5 @@ The program is optimized for handling large files (up to several GBs). For examp ## Troubleshooting - **Error: Object Not Found**: Ensure that the primary bucket is correctly configured and contains the objects you expect. -- **Authentication Failure**: Check that the authentication method (trusted profile or service credentials) is correctly set up. +- **Authentication Failure**: Check that the authentication method (trusted profile) is correctly set up. - **Job Timeout or Failure**: If the job takes longer than expected, check the logs for any performance bottlenecks or errors. From 12386216a04aa108c89113cf7750d903a07f3732 Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:16:32 +0530 Subject: [PATCH 08/19] Remove option for service-credentials. Set 3 variables to default properties Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/main.go | 84 ++++++++++--------------------------- 1 file changed, 21 insertions(+), 63 deletions(-) diff --git a/code-engine-cos2cos/main.go b/code-engine-cos2cos/main.go index 3a6874d9..c635df30 100644 --- a/code-engine-cos2cos/main.go +++ b/code-engine-cos2cos/main.go @@ -5,7 +5,6 @@ import ( "os" "github.com/IBM/ibm-cos-sdk-go/service/s3" - "github.com/joho/godotenv" "ibm.com/codeengine/cos2cos/bucketOperations" "ibm.com/codeengine/cos2cos/process" ) @@ -16,74 +15,34 @@ import ( func main() { fmt.Println("Starting application with Job Index:", os.Getenv("JOB_INDEX")) - // Default Values for running the program - // Use service credentials and - var isInCodeEngine bool = true - var isUsingTrustedProfile bool = true + os.Setenv("IBM_COS_CRTokenFilePath_PRIMARY", "/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token") + os.Setenv("IBM_COS_CRTokenFilePath_SECONDARY", "/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token") + os.Setenv("BUCKET_TIMESTAMP_FILENAME", "last_modified_time.json") - // First argument states if the program is running in code engine env. 
(Default: Yes) - if len(os.Args) > 1 { - isInCodeEngine = os.Args[1] == "true" - if !isInCodeEngine { - os.Setenv("JOB_ARRAY_SIZE", "1") - os.Setenv("JOB_INDEX", "0") - } - } - - // Second argument states if the program uses trustedProfile (Default: Yes) - if len(os.Args) > 2 { - isUsingTrustedProfile = os.Args[2] == "true" - } - - if !isInCodeEngine { - err := godotenv.Load() - if err != nil { - fmt.Println("Error loading env file") - return - } - } - - fmt.Println("Any errors while processing objects:", processingMain(isUsingTrustedProfile)) + fmt.Println("Any errors while processing objects:", processingMain()) - fmt.Println("Clsoing application") + fmt.Println("Closing application") } // Function will initiate the buckets, sessions and trigger the StartProcessing function for actual processing // accepts a boolean argument which decides how to create COS-Client (Using TrustedProfile or Service Credentials) -func processingMain(useTrustedProfile bool) error { - // fmt.Println("hehe") - +func processingMain() error { var cosClient_PRIMARY, cosClient_SECONDARY *s3.S3 - if !useTrustedProfile { - // Create a cosClient using Service Credentials - cosClient_PRIMARY = bucketOperations.NewCosClient(os.Getenv("IBM_COS_API_KEY_PRIMARY"), - os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY"), - os.Getenv("IBM_COS_ENDPOINT_PRIMARY"), - os.Getenv("IBM_COS_REGION_PRIMARY"), - ) - - cosClient_SECONDARY = bucketOperations.NewCosClient(os.Getenv("IBM_COS_API_KEY_SECONDARY"), - os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY"), - os.Getenv("IBM_COS_ENDPOINT_SECONDARY"), - os.Getenv("IBM_COS_REGION_SECONDARY"), - ) - } else { - // Create a cosClient using Trusted Profile - cosClient_PRIMARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID_PRIMARY"), - os.Getenv("IBM_COS_CRTokenFilePath_PRIMARY"), - os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY"), - os.Getenv("IBM_COS_ENDPOINT_PRIMARY"), - os.Getenv("IBM_COS_REGION_PRIMARY"), - ) + // Create a cosClient using Trusted Profile + cosClient_PRIMARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID_PRIMARY"), + os.Getenv("IBM_COS_CRTokenFilePath_PRIMARY"), + os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY"), + os.Getenv("IBM_COS_ENDPOINT_PRIMARY"), + os.Getenv("IBM_COS_REGION_PRIMARY"), + ) - cosClient_SECONDARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID_SECONDARY"), - os.Getenv("IBM_COS_CRTokenFilePath_SECONDARY"), - os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY"), - os.Getenv("IBM_COS_ENDPOINT_SECONDARY"), - os.Getenv("IBM_COS_REGION_SECONDARY"), - ) - } + cosClient_SECONDARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID_SECONDARY"), + os.Getenv("IBM_COS_CRTokenFilePath_SECONDARY"), + os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY"), + os.Getenv("IBM_COS_ENDPOINT_SECONDARY"), + os.Getenv("IBM_COS_REGION_SECONDARY"), + ) // Creating Primary bucket instance primary_bucket := bucketOperations.NewBucket( @@ -96,15 +55,14 @@ func processingMain(useTrustedProfile bool) error { os.Getenv("SECONDARY_COS_BUCKET_NAME"), cosClient_SECONDARY, ) - // fmt.Println("hehe") // Initiate the process from primary bucket to secondary bucket // err := process.StartProcessing(primary_bucket, secondary_bucket) err := process.StartProcessingPagination(primary_bucket, secondary_bucket) if err != nil { - return fmt.Errorf("\nBackup failed: %v", err) + return fmt.Errorf("\nProcessing failed: %v", err) } - fmt.Println("\nBackup 
completed successfully.") + fmt.Println("\nProcessing completed successfully.") return nil } From 413c8734c62d60e52e509c75b80efad4a85e3706 Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:42:00 +0530 Subject: [PATCH 09/19] Update Dockerfile to use distroless image Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/Dockerfile | 40 ++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/code-engine-cos2cos/Dockerfile b/code-engine-cos2cos/Dockerfile index dac007f1..53a3d55f 100644 --- a/code-engine-cos2cos/Dockerfile +++ b/code-engine-cos2cos/Dockerfile @@ -1,25 +1,47 @@ -FROM golang:1.23-alpine as builder +# FROM golang:1.23-alpine as builder -WORKDIR /app +# WORKDIR /app -COPY go.mod go.sum ./ +# COPY go.mod go.sum ./ -RUN go mod tidy +# RUN go mod tidy + +# COPY . . + +# # Build the Go application for Linux, stripped of debug information +# RUN CGO_ENABLED=0 GOOS=linux go build -o cos2cos + +# # Stage 2: Final stage (minimal image) +# FROM alpine + +# RUN apk --no-cache add ca-certificates + + +# # Copy the binary from the builder stage +# COPY --from=builder /app/cos2cos / + +# RUN chmod +x /cos2cos + +# ENTRYPOINT ["/cos2cos"] + + +FROM quay.io/projectquay/golang:1.23 AS builder +WORKDIR /go/src/app COPY . . +RUN go mod tidy + # Build the Go application for Linux, stripped of debug information RUN CGO_ENABLED=0 GOOS=linux go build -o cos2cos # Stage 2: Final stage (minimal image) -FROM alpine +FROM gcr.io/distroless/static-debian12 -RUN apk --no-cache add ca-certificates +# RUN apk --no-cache add ca-certificates # Copy the binary from the builder stage -COPY --from=builder /app/cos2cos / - -RUN chmod +x /cos2cos +COPY --from=builder /go/src/app/cos2cos / ENTRYPOINT ["/cos2cos"] \ No newline at end of file From 0dff2847dba0aaa808c2e7c093a6f8c54ba2a50a Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:43:57 +0530 Subject: [PATCH 10/19] Remove comments Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/Dockerfile | 27 --------------------------- 1 file changed, 27 deletions(-) diff --git a/code-engine-cos2cos/Dockerfile b/code-engine-cos2cos/Dockerfile index 53a3d55f..020c5642 100644 --- a/code-engine-cos2cos/Dockerfile +++ b/code-engine-cos2cos/Dockerfile @@ -1,30 +1,3 @@ -# FROM golang:1.23-alpine as builder - -# WORKDIR /app - -# COPY go.mod go.sum ./ - -# RUN go mod tidy - -# COPY . . - -# # Build the Go application for Linux, stripped of debug information -# RUN CGO_ENABLED=0 GOOS=linux go build -o cos2cos - -# # Stage 2: Final stage (minimal image) -# FROM alpine - -# RUN apk --no-cache add ca-certificates - - -# # Copy the binary from the builder stage -# COPY --from=builder /app/cos2cos / - -# RUN chmod +x /cos2cos - -# ENTRYPOINT ["/cos2cos"] - - FROM quay.io/projectquay/golang:1.23 AS builder WORKDIR /go/src/app From 3c98de51b3db8adcb1fdf5117bc773b5d43c84ee Mon Sep 17 00:00:00 2001 From: Shailza Thakur Date: Tue, 1 Jul 2025 18:45:09 +0530 Subject: [PATCH 11/19] Remove comments Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/main.go | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/code-engine-cos2cos/main.go b/code-engine-cos2cos/main.go index c635df30..6c8e4599 100644 --- a/code-engine-cos2cos/main.go +++ b/code-engine-cos2cos/main.go @@ -9,12 +9,10 @@ import ( "ibm.com/codeengine/cos2cos/process" ) -// Note: When running the program in local env, pass false, false as cmd-line arg. -// Arg1: isInCodeEngine <- to load the env file from local Os. 
-// Arg2: isUsingTrustedProfile <- Works only with kubernetes cluster. func main() { fmt.Println("Starting application with Job Index:", os.Getenv("JOB_INDEX")) + // Setting default values os.Setenv("IBM_COS_CRTokenFilePath_PRIMARY", "/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token") os.Setenv("IBM_COS_CRTokenFilePath_SECONDARY", "/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token") os.Setenv("BUCKET_TIMESTAMP_FILENAME", "last_modified_time.json") From cc31150a9435507a3f09f69a91585ca44b232c89 Mon Sep 17 00:00:00 2001 From: Hamza Date: Wed, 2 Jul 2025 18:04:43 +0530 Subject: [PATCH 12/19] update script to use single trusted profile, Update base-secret and auth-secret Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/config.sh | 114 ++++++++++++++++------------------ 1 file changed, 53 insertions(+), 61 deletions(-) diff --git a/code-engine-cos2cos/config.sh b/code-engine-cos2cos/config.sh index 88a703ff..9fcd2e20 100755 --- a/code-engine-cos2cos/config.sh +++ b/code-engine-cos2cos/config.sh @@ -44,19 +44,24 @@ function create_instance_bucket(){ COS_INSTANCE_CRN_SECONDARY=${COS_INSTANCE_CRN} fi - # Creating bucket in the instance - silent failure. - echo "Step $I.2: Creating $TYPE Bucket." - ibmcloud cos bucket-create \ - --bucket ${COS_BUCKET_NAME} \ - --class smart \ - --ibm-service-instance-id ${COS_INSTANCE_CRN} \ - --region ${COS_REGION} 2>/dev/null - # Check if bucket exists. + ibmcloud cos config crn --crn "${COS_INSTANCE_CRN}" --force ibmcloud cos bucket-head --bucket "$COS_BUCKET_NAME" --region "$COS_REGION" if [ $? -ne 0 ]; then - echo -e "${RED}Failure${NC}: Step $I.2: $TYPE Bucket does not exists. Exiting..." - exit 1 + echo "Step $I.2: Creating $TYPE Bucket." + ibmcloud cos bucket-create \ + --bucket ${COS_BUCKET_NAME} \ + --class smart \ + --ibm-service-instance-id ${COS_INSTANCE_CRN} \ + --region ${COS_REGION} 2>/dev/null + ibmcloud cos bucket-head --bucket "$COS_BUCKET_NAME" --region "$COS_REGION" + + if [ $? -ne 0 ]; then + echo -e "${RED}Failure${NC}: Step $I.2: $TYPE Bucket creation failed. Exiting..." + exit 0 + else + echo -e "${GREEN}Success${NC}: Step $I.2: $TYPE Bucket Created." + fi else echo -e "${GREEN}Success${NC}: Step $I.2: $TYPE Bucket Found." fi @@ -89,15 +94,15 @@ fi # Creating Trusted Profile echo "---" echo "Step 5: Creating/Fetching Trusted Profile $TRUSTED_PROFILE_NAME" -COS_TRUSTED_PROFILE_ID_PRIMARY=$(ibmcloud iam trusted-profile ${TRUSTED_PROFILE_NAME} --id) -if [ -z "$COS_TRUSTED_PROFILE_ID_PRIMARY" ] ; then - COS_TRUSTED_PROFILE_ID_PRIMARY=$(ibmcloud iam trusted-profile-create "${TRUSTED_PROFILE_NAME}" -o JSON | jq -r '.id') +COS_TRUSTED_PROFILE_ID=$(ibmcloud iam trusted-profile ${TRUSTED_PROFILE_NAME} --id) +if [ -z "$COS_TRUSTED_PROFILE_ID" ] ; then + COS_TRUSTED_PROFILE_ID=$(ibmcloud iam trusted-profile-create "${TRUSTED_PROFILE_NAME}" -o JSON | jq -r '.id') if [ $? -ne 0 ]; then echo -e "${RED}Failure${NC}: Step 5: Could not create trusted-profile.Exiting\n" exit 1 fi fi -COS_TRUSTED_PROFILE_ID_SECONDARY=$(echo ${COS_TRUSTED_PROFILE_ID_PRIMARY}) + if [ $? 
-eq 0 ]; then echo -e "${GREEN}SUCCESS${NC}: Step 5: Trusted Profile Created/Fetched.\n" fi @@ -107,15 +112,14 @@ echo "Step 6: Creating Secrets (Base Secret, Auth Secret)" echo "---" echo "Step 6.1: Creating Base Secret: $BASE_SECRET" ibmcloud ce secret create --name ${BASE_SECRET} \ - --from-literal SECONDARY_COS_BUCKET_NAME=${COS_BUCKET_NAME_SECONDARY} \ - --from-literal IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=${COS_INSTANCE_CRN_SECONDARY} \ - --from-literal IBM_COS_REGION_SECONDARY=${COS_REGION_SECONDARY} \ - --from-literal IBM_COS_ENDPOINT_SECONDARY=${COS_ENDPOINT_SECONDARY} \ - --from-literal PRIMARY_COS_BUCKET_NAME=${COS_BUCKET_NAME_PRIMARY} \ + --from-literal COS_BUCKET_NAME_PRIMARY=${COS_BUCKET_NAME_PRIMARY} \ --from-literal IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=${COS_INSTANCE_CRN_PRIMARY} \ --from-literal IBM_COS_REGION_PRIMARY=${COS_REGION_PRIMARY} \ --from-literal IBM_COS_ENDPOINT_PRIMARY=${COS_ENDPOINT_PRIMARY} \ - --from-literal BUCKET_TIMESTAMP_FILENAME=${BUCKET_TIMESTAMP_FILENAME} + --from-literal COS_BUCKET_NAME_SECONDARY=${COS_BUCKET_NAME_SECONDARY} \ + --from-literal IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=${COS_INSTANCE_CRN_SECONDARY} \ + --from-literal IBM_COS_REGION_SECONDARY=${COS_REGION_SECONDARY} \ + --from-literal IBM_COS_ENDPOINT_SECONDARY=${COS_ENDPOINT_SECONDARY} if [ $? -ne 0 ]; then echo "Secret '${BASE_SECRET}' already exists." @@ -123,15 +127,14 @@ if [ $? -ne 0 ]; then if [[ "$confirm" =~ ^[Yy]$ ]]; then echo "Updating secret ${BASE_SECRET}..." ibmcloud ce secret update --name ${BASE_SECRET} \ - --from-literal SECONDARY_COS_BUCKET_NAME=${COS_BUCKET_NAME_SECONDARY} \ - --from-literal IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=${COS_INSTANCE_CRN_SECONDARY} \ - --from-literal IBM_COS_REGION_SECONDARY=${COS_REGION_SECONDARY} \ - --from-literal IBM_COS_ENDPOINT_SECONDARY=${COS_ENDPOINT_SECONDARY} \ - --from-literal PRIMARY_COS_BUCKET_NAME=${COS_BUCKET_NAME_PRIMARY} \ - --from-literal IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=${COS_INSTANCE_CRN_PRIMARY} \ - --from-literal IBM_COS_REGION_PRIMARY=${COS_REGION_PRIMARY} \ - --from-literal IBM_COS_ENDPOINT_PRIMARY=${COS_ENDPOINT_PRIMARY} \ - --from-literal BUCKET_TIMESTAMP_FILENAME=${BUCKET_TIMESTAMP_FILENAME} + --from-literal COS_BUCKET_NAME_PRIMARY=${COS_BUCKET_NAME_PRIMARY} \ + --from-literal IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=${COS_INSTANCE_CRN_PRIMARY} \ + --from-literal IBM_COS_REGION_PRIMARY=${COS_REGION_PRIMARY} \ + --from-literal IBM_COS_ENDPOINT_PRIMARY=${COS_ENDPOINT_PRIMARY} \ + --from-literal COS_BUCKET_NAME_SECONDARY=${COS_BUCKET_NAME_SECONDARY} \ + --from-literal IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=${COS_INSTANCE_CRN_SECONDARY} \ + --from-literal IBM_COS_REGION_SECONDARY=${COS_REGION_SECONDARY} \ + --from-literal IBM_COS_ENDPOINT_SECONDARY=${COS_ENDPOINT_SECONDARY} if [ $? -eq 0 ]; then echo -e "${GREEN}SUCCESS${NC}: Step 6.1: Base secret update complete." @@ -151,20 +154,15 @@ fi echo "---" echo "Step 6.2: Creating Auth Secret: $AUTH_SECRET" ibmcloud ce secret create --name ${AUTH_SECRET} \ - --from-literal IBM_COS_CRTokenFilePath_PRIMARY=${IBM_COS_CRTokenFilePath_PRIMARY} \ - --from-literal IBM_COS_CRTokenFilePath_SECONDARY=${IBM_COS_CRTokenFilePath_SECONDARY} \ - --from-literal IBM_COS_TRUSTED_PROFILE_ID_PRIMARY=${COS_TRUSTED_PROFILE_ID_PRIMARY} \ - --from-literal IBM_COS_TRUSTED_PROFILE_ID_SECONDARY=${COS_TRUSTED_PROFILE_ID_SECONDARY} + --from-literal IBM_COS_TRUSTED_PROFILE_ID=${COS_TRUSTED_PROFILE_ID} + if [ $? -ne 0 ]; then echo "Secret '${AUTH_SECRET}' already exists." 
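  # The secret already exists; confirm before replacing its trusted profile ID.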
read -p "Do you want to override the existing auth secret? (y/n): " confirm if [[ "$confirm" =~ ^[Yy]$ ]]; then echo "Updating secret ${AUTH_SECRET}..." ibmcloud ce secret update --name ${AUTH_SECRET} \ - --from-literal IBM_COS_CRTokenFilePath_PRIMARY=${IBM_COS_CRTokenFilePath_PRIMARY} \ - --from-literal IBM_COS_CRTokenFilePath_SECONDARY=${IBM_COS_CRTokenFilePath_SECONDARY} \ - --from-literal IBM_COS_TRUSTED_PROFILE_ID_PRIMARY=${COS_TRUSTED_PROFILE_ID_PRIMARY} \ - --from-literal IBM_COS_TRUSTED_PROFILE_ID_SECONDARY=${COS_TRUSTED_PROFILE_ID_SECONDARY} + --from-literal IBM_COS_TRUSTED_PROFILE_ID=${COS_TRUSTED_PROFILE_ID} if [ $? -eq 0 ]; then echo -e "${GREEN}SUCCESS${NC}: Step 6.3: Auth secret update complete." @@ -187,20 +185,27 @@ ibmcloud ce job create --name ${JOB_NAME} \ --src "." \ --env-from-secret ${BASE_SECRET} \ --env-from-secret ${AUTH_SECRET} \ - --argument true 2>/dev/null \ - --wait - # --registry-secret ${CONTAINER_REGISTRY_SECRET} \ + --wait \ + --trusted-profiles-enabled \ + --argument true 2>/dev/null if [ $? -ne 0 ]; then - # echo "Job '${JOB_NAME}' already exists. Exiting" - # exit 1 + echo "Job '${JOB_NAME}' already exists." + read -p "Do you want to override the existing Job? (y/n): " confirm + if [[ "$confirm" =~ ^[Yy]$ ]]; then - echo "Job '${JOB_NAME}' already exists. Updating Job." - ibmcloud ce job update --name ${JOB_NAME} --image "${JOB_IMAGE}" \ - --env-from-secret ${BASE_SECRET} \ - --env-from-secret ${AUTH_SECRET} \ - --argument true 2>/dev/null - # --registry-secret ${CONTAINER_REGISTRY_SECRET} \ + echo "Job '${JOB_NAME}' already exists. Updating Job." + ibmcloud ce job update --name ${JOB_NAME} \ + --src "." \ + --env-from-secret ${BASE_SECRET} \ + --env-from-secret ${AUTH_SECRET} \ + --wait \ + --trusted-profiles-enabled \ + --argument true 2>/dev/null + else + echo "Job update cancelled by user. Exiting..." + exit 0 + fi fi if [ $? 
-eq 0 ]; then echo -e "${GREEN}SUCCESS${NC}Step 7: Job Created" @@ -231,16 +236,3 @@ ibmcloud iam trusted-profile-policy-create ${TRUSTED_PROFILE_NAME} \ --service-name cloud-object-storage \ --service-instance ${COS_INSTANCE_CRN_SECONDARY} 2>/dev/null # echo "***** DONE: Linking Secondary COS To Trusted Profile" - -echo "Step 8.4: Compute Resource Token" -curl \ - --request PATCH "https://api.${PROJECT_REGION}.codeengine.cloud.ibm.com/v2/projects/$(ibmcloud ce project current --output json | jq -r .guid)/jobs/${JOB_NAME}" \ - --header 'Accept: application/json' \ - --header "Authorization: $(ibmcloud iam oauth-tokens --output json | jq -r '.iam_token')" \ - --header 'Content-Type: application/merge-patch+json' \ - --header 'If-Match: *' \ - --data-raw "{ - \"run_compute_resource_token_enabled\": true - }" 2>/dev/null -# echo "******* DONE: Compute Resource Token *******" - From 0eb88fe63363b4d0fe2dfabc71e837de9ba30106 Mon Sep 17 00:00:00 2001 From: Hamza Date: Wed, 2 Jul 2025 18:06:22 +0530 Subject: [PATCH 13/19] Remove extra variables to default values Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/data.sh | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/code-engine-cos2cos/data.sh b/code-engine-cos2cos/data.sh index b76606f6..2200e751 100644 --- a/code-engine-cos2cos/data.sh +++ b/code-engine-cos2cos/data.sh @@ -1,15 +1,15 @@ #!/bin/sh # Regions -COS_REGION_PRIMARY=au-syd +COS_REGION_PRIMARY=eu-es COS_REGION_SECONDARY=eu-es PROJECT_REGION=eu-es PROJECT_NAME="Cos2Cos" # Resource groups PROJECT_RESOURCE_GROUP=resource-group -COS_RESOURCE_GROUP_PRIMARY=resource-group -COS_RESOURCE_GROUP_SECONDARY=resource-group +COS_RESOURCE_GROUP_PRIMARY=${PROJECT_RESOURCE_GROUP} +COS_RESOURCE_GROUP_SECONDARY=${PROJECT_RESOURCE_GROUP} #Creating a COS Instance COS_INSTANCE_NAME_PRIMARY=cos-instance-one @@ -20,7 +20,7 @@ COS_BUCKET_NAME_PRIMARY=cos-bucket-one COS_BUCKET_NAME_SECONDARY=cos-bucket-two # Endpoints -COS_ENDPOINT_PRIMARY=s3.au-syd.cloud-object-storage.appdomain.cloud +COS_ENDPOINT_PRIMARY=s3.eu-es.cloud-object-storage.appdomain.cloud COS_ENDPOINT_SECONDARY=s3.eu-es.cloud-object-storage.appdomain.cloud # Trusted Profile @@ -31,7 +31,4 @@ JOB_NAME=cos2cos-job # Define registry credentials BASE_SECRET=ce-secret -AUTH_SECRET=auth-secret - -# Array Size of Job run Instance -ARRAY_SIZE=13 \ No newline at end of file +AUTH_SECRET=auth-secret \ No newline at end of file From cb93ed3f222008e8bfb13d4629cbd40e14c8e10b Mon Sep 17 00:00:00 2001 From: Hamza Date: Wed, 2 Jul 2025 18:06:30 +0530 Subject: [PATCH 14/19] Remove dead-code Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/Dockerfile | 3 --- 1 file changed, 3 deletions(-) diff --git a/code-engine-cos2cos/Dockerfile b/code-engine-cos2cos/Dockerfile index 020c5642..2d2a8fee 100644 --- a/code-engine-cos2cos/Dockerfile +++ b/code-engine-cos2cos/Dockerfile @@ -11,9 +11,6 @@ RUN CGO_ENABLED=0 GOOS=linux go build -o cos2cos # Stage 2: Final stage (minimal image) FROM gcr.io/distroless/static-debian12 -# RUN apk --no-cache add ca-certificates - - # Copy the binary from the builder stage COPY --from=builder /go/src/app/cos2cos / From ecef454d83e10ddc914e0d104b7c12d6c1c7d6d9 Mon Sep 17 00:00:00 2001 From: Hamza Date: Wed, 2 Jul 2025 18:06:44 +0530 Subject: [PATCH 15/19] Delete sample env Signed-off-by: Hamza Bharmal --- code-engine-cos2cos/env_sample | 33 --------------------------------- 1 file changed, 33 deletions(-) delete mode 100644 code-engine-cos2cos/env_sample diff --git a/code-engine-cos2cos/env_sample 
From ecef454d83e10ddc914e0d104b7c12d6c1c7d6d9 Mon Sep 17 00:00:00 2001
From: Hamza
Date: Wed, 2 Jul 2025 18:06:44 +0530
Subject: [PATCH 15/19] Delete sample env

Signed-off-by: Hamza Bharmal
---
 code-engine-cos2cos/env_sample | 33 ---------------------------------
 1 file changed, 33 deletions(-)
 delete mode 100644 code-engine-cos2cos/env_sample

diff --git a/code-engine-cos2cos/env_sample b/code-engine-cos2cos/env_sample
deleted file mode 100644
index 36cbfd07..00000000
--- a/code-engine-cos2cos/env_sample
+++ /dev/null
@@ -1,33 +0,0 @@
-IBM_COS_API_KEY_PRIMARY=<>
-IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=<>
-IBM_COS_ENDPOINT_PRIMARY=<>
-IBM_COS_REGION_PRIMARY= <>
-PRIMARY_COS_BUCKET_NAME=<>
-
-IBM_COS_API_KEY_SECONDARY=<>
-IBM_COS_ENDPOINT_SECONDARY=<>
-IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=<>
-IBM_COS_REGION_SECONDARY=<>
-SECONDARY_COS_BUCKET_NAME=<>
-
-BUCKET_TIMESTAMP_FILENAME=temp/last_modified_time.json
-
-# Primary Bucket Trusted Profile Credentials
-IBM_COS_TRUSTED_PROFILE_ID_PRIMARY= <>
-IBM_COS_CRTokenFilePath_PRIMARY=/var/run/secrets/tokens/service-account-token
-IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY=<>
-IBM_COS_ENDPOINT_PRIMARY=<>
-IBM_COS_REGION_PRIMARY=<>
-
-# Secondary Bucket Trusted Profile Credentials
-IBM_COS_TRUSTED_PROFILE_ID_SECONDARY= <>
-IBM_COS_CRTokenFilePath_SECONDARY=/var/run/secrets/tokens/service-account-token
-IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY=<>
-IBM_COS_REGION_SECONDARY=<>
-
-# Bucket Names
-# PRIMARY_COS_BUCKET_NAME=<>
-# SECONDARY_COS_BUCKET_NAME=<>
-
-# IBM Auth Endpoint (Typically you don't need to change this)
-IBM_AUTH_ENDPOINT=https://iam.cloud.ibm.com/identity/token

From 04058a8d3f2dd6c95ce07ada5b20406efe535941 Mon Sep 17 00:00:00 2001
From: Hamza
Date: Wed, 2 Jul 2025 18:07:20 +0530
Subject: [PATCH 16/19] Update timestamp.go to use default filename

Signed-off-by: Hamza Bharmal
---
 code-engine-cos2cos/metadata/timestamp.go | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/code-engine-cos2cos/metadata/timestamp.go b/code-engine-cos2cos/metadata/timestamp.go
index cee4f1c8..74eb5c09 100644
--- a/code-engine-cos2cos/metadata/timestamp.go
+++ b/code-engine-cos2cos/metadata/timestamp.go
@@ -48,7 +48,7 @@ func GetLastProcessTime(buckets ...*bucketOperations.Bucket) time.Time {
 
 // Function to load timestamp file locally or using a object in Bucket
 func LoadTimestamp(buckets []*bucketOperations.Bucket) ([]byte, error) {
-	filename := os.Getenv("BUCKET_TIMESTAMP_FILENAME")
+	filename := "last_modified_time.json"
 
 	if len(buckets) > 0 {
 		bucket := buckets[0]
@@ -71,7 +71,7 @@ func LoadTimestamp(buckets []*bucketOperations.Bucket) ([]byte, error) {
 
 // Function expects a timestamp as string in format "time.RFC3339"
 func PutLastBackupTime(timestamp string, bucket ...*bucketOperations.Bucket) error {
-	filename := os.Getenv("BUCKET_TIMESTAMP_FILENAME")
+	filename := "last_modified_time.json"
 
 	if timestamp == "" {
 		timestamp = time.Now().Format(time.RFC3339)
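Editor's aside (not part of the patch series): with the filename now fixed to `last_modified_time.json`, the timestamp helpers need no extra configuration. The sketch below assumes only the `GetLastProcessTime` and `PutLastBackupTime` signatures visible in the hunks above; called with no bucket arguments, they work against the local timestamp file rather than an object in COS.

```go
package main

import (
	"fmt"
	"time"

	"ibm.com/codeengine/cos2cos/metadata"
)

func main() {
	// Last successful processing time, read from the local timestamp file
	// ("last_modified_time.json" after the change above).
	last := metadata.GetLastProcessTime()
	fmt.Println("processing objects modified after:", last)

	// ... copy/transform objects newer than `last` ...

	// Persist a fresh RFC3339 marker for the next run.
	if err := metadata.PutLastBackupTime(time.Now().Format(time.RFC3339)); err != nil {
		fmt.Println("could not persist timestamp:", err)
	}
}
```

Passing a bucket as the variadic argument keeps the marker in COS instead, which is what the job itself does.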
From 4d8601809c21b906332428d902b04b5d26c269df Mon Sep 17 00:00:00 2001
From: Hamza
Date: Wed, 2 Jul 2025 18:07:59 +0530
Subject: [PATCH 17/19] Update to use default values, Support single-trusted-profile

Signed-off-by: Hamza Bharmal
---
 code-engine-cos2cos/main.go | 21 +++++++++------------
 1 file changed, 9 insertions(+), 12 deletions(-)

diff --git a/code-engine-cos2cos/main.go b/code-engine-cos2cos/main.go
index 6c8e4599..328210e6 100644
--- a/code-engine-cos2cos/main.go
+++ b/code-engine-cos2cos/main.go
@@ -12,11 +12,6 @@ import (
 func main() {
 	fmt.Println("Starting application with Job Index:", os.Getenv("JOB_INDEX"))
 
-	// Setting default values
-	os.Setenv("IBM_COS_CRTokenFilePath_PRIMARY", "/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token")
-	os.Setenv("IBM_COS_CRTokenFilePath_SECONDARY", "/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token")
-	os.Setenv("BUCKET_TIMESTAMP_FILENAME", "last_modified_time.json")
-
 	fmt.Println("Any errors while processing objects:", processingMain())
 	fmt.Println("Closing application")
@@ -27,16 +22,19 @@ func main() {
 func processingMain() error {
 	var cosClient_PRIMARY, cosClient_SECONDARY *s3.S3
 
+	// CR Token_FilePath (stays default)
+	crTokenFilePath := "/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token"
+
 	// Create a cosClient using Trusted Profile
-	cosClient_PRIMARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID_PRIMARY"),
-		os.Getenv("IBM_COS_CRTokenFilePath_PRIMARY"),
+	cosClient_PRIMARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID"),
+		crTokenFilePath,
 		os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_PRIMARY"),
 		os.Getenv("IBM_COS_ENDPOINT_PRIMARY"),
 		os.Getenv("IBM_COS_REGION_PRIMARY"),
 	)
 
-	cosClient_SECONDARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID_SECONDARY"),
-		os.Getenv("IBM_COS_CRTokenFilePath_SECONDARY"),
+	cosClient_SECONDARY = bucketOperations.NewCosClientTrustedProfile(os.Getenv("IBM_COS_TRUSTED_PROFILE_ID"),
+		crTokenFilePath,
 		os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_SECONDARY"),
 		os.Getenv("IBM_COS_ENDPOINT_SECONDARY"),
 		os.Getenv("IBM_COS_REGION_SECONDARY"),
@@ -44,17 +42,16 @@ func processingMain() error {
 	// Creating Primary bucket instance
 	primary_bucket := bucketOperations.NewBucket(
-		os.Getenv("PRIMARY_COS_BUCKET_NAME"),
+		os.Getenv("COS_BUCKET_NAME_PRIMARY"),
 		cosClient_PRIMARY,
 	)
 
 	// Creating secondary bucket instance
 	secondary_bucket := bucketOperations.NewBucket(
-		os.Getenv("SECONDARY_COS_BUCKET_NAME"),
+		os.Getenv("COS_BUCKET_NAME_SECONDARY"),
 		cosClient_SECONDARY,
 	)
 
 	// Initiate the process from primary bucket to secondary bucket
-	// err := process.StartProcessing(primary_bucket, secondary_bucket)
 	err := process.StartProcessingPagination(primary_bucket, secondary_bucket)
 	if err != nil {
 		return fmt.Errorf("\nProcessing failed: %v", err)
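Editor's aside (not part of the patch series): the single-trusted-profile wiring above can be read as the minimal program below. It assumes only the `NewCosClientTrustedProfile` and `NewBucket` signatures shown in the diff; the `newClient` helper and its `side` parameter are illustrative, not code from the sample.

```go
package main

import (
	"os"

	"github.com/IBM/ibm-cos-sdk-go/service/s3"

	"ibm.com/codeengine/cos2cos/bucketOperations"
)

// Default Code Engine compute-resource token path, as hard-coded in main.go above.
const crTokenFilePath = "/var/run/secrets/codeengine.cloud.ibm.com/compute-resource-token/token"

// newClient builds a COS client for one side ("PRIMARY" or "SECONDARY") from the
// shared trusted profile ID and that side's instance/endpoint/region variables.
func newClient(side string) *s3.S3 {
	return bucketOperations.NewCosClientTrustedProfile(
		os.Getenv("IBM_COS_TRUSTED_PROFILE_ID"),
		crTokenFilePath,
		os.Getenv("IBM_COS_RESOURCE_INSTANCE_ID_"+side),
		os.Getenv("IBM_COS_ENDPOINT_"+side),
		os.Getenv("IBM_COS_REGION_"+side),
	)
}

func main() {
	primary := bucketOperations.NewBucket(os.Getenv("COS_BUCKET_NAME_PRIMARY"), newClient("PRIMARY"))
	secondary := bucketOperations.NewBucket(os.Getenv("COS_BUCKET_NAME_SECONDARY"), newClient("SECONDARY"))
	_, _ = primary, secondary // the real job hands these to process.StartProcessingPagination
}
```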
From e88ca7bbcb830a4ad39828a46d5b7eab282567d9 Mon Sep 17 00:00:00 2001
From: Hamza
Date: Wed, 2 Jul 2025 18:24:56 +0530
Subject: [PATCH 18/19] Update readme

Signed-off-by: Hamza Bharmal
---
 code-engine-cos2cos/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/code-engine-cos2cos/README.md b/code-engine-cos2cos/README.md
index 6aae6482..b81ed020 100644
--- a/code-engine-cos2cos/README.md
+++ b/code-engine-cos2cos/README.md
@@ -43,7 +43,7 @@ Before you begin, ensure you have the following:
 1. **Clone the Repository**:
    ```bash
    git clone https://github.com/IBM/CodeEngine.git
-   cd code-engine-cos2cos
+   cd cos2cos
    ```
 
 2. **Modify the Processing Function (Optional)**:

From 6da59ad1b5951aaa28328006c6d7d29835fdc37f Mon Sep 17 00:00:00 2001
From: Hamza Bharmal
Date: Tue, 7 Oct 2025 14:16:07 +0530
Subject: [PATCH 19/19] Update pkg versions

Signed-off-by: Hamza Bharmal
---
 code-engine-cos2cos/go.mod | 38 +++++++++++++++++++-------------------
 code-engine-cos2cos/go.sum | 39 +++++++++++++++++++++++++++++++++++++--
 2 files changed, 57 insertions(+), 20 deletions(-)

diff --git a/code-engine-cos2cos/go.mod b/code-engine-cos2cos/go.mod
index 7afa0abf..586e1973 100644
--- a/code-engine-cos2cos/go.mod
+++ b/code-engine-cos2cos/go.mod
@@ -1,34 +1,34 @@
 module ibm.com/codeengine/cos2cos
 
-go 1.23.5
+go 1.24.0
 
 require (
-	github.com/IBM/ibm-cos-sdk-go v1.12.1
-	github.com/joho/godotenv v1.5.1
+	github.com/IBM/ibm-cos-sdk-go v1.12.3
 	github.com/onsi/ginkgo v1.16.5
-	github.com/onsi/ginkgo/v2 v2.23.0
-	github.com/onsi/gomega v1.36.2
-	github.com/stretchr/testify v1.10.0
+	github.com/onsi/ginkgo/v2 v2.23.3
+	github.com/onsi/gomega v1.37.0
+	github.com/stretchr/testify v1.11.1
 )
 
 require (
-	github.com/IBM/go-sdk-core/v5 v5.18.5 // indirect
+	github.com/IBM/go-sdk-core/v5 v5.21.0 // indirect
 	github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/fsnotify/fsnotify v1.6.0 // indirect
-	github.com/gabriel-vasile/mimetype v1.4.3 // indirect
+	github.com/gabriel-vasile/mimetype v1.4.10 // indirect
 	github.com/go-logr/logr v1.4.2 // indirect
-	github.com/go-openapi/errors v0.21.0 // indirect
-	github.com/go-openapi/strfmt v0.22.1 // indirect
+	github.com/go-openapi/errors v0.22.3 // indirect
+	github.com/go-openapi/strfmt v0.24.0 // indirect
 	github.com/go-playground/locales v0.14.1 // indirect
 	github.com/go-playground/universal-translator v0.18.1 // indirect
-	github.com/go-playground/validator/v10 v10.19.0 // indirect
+	github.com/go-playground/validator/v10 v10.28.0 // indirect
 	github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
+	github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
 	github.com/google/go-cmp v0.7.0 // indirect
 	github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 // indirect
 	github.com/google/uuid v1.6.0 // indirect
 	github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
-	github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
+	github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
 	github.com/leodido/go-urn v1.4.0 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
@@ -36,14 +36,16 @@ require (
 	github.com/oklog/ulid v1.3.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/stretchr/objx v0.5.2 // indirect
-	go.mongodb.org/mongo-driver v1.14.0 // indirect
-	golang.org/x/crypto v0.36.0 // indirect
-	golang.org/x/net v0.37.0 // indirect
-	golang.org/x/sys v0.31.0 // indirect
-	golang.org/x/text v0.23.0 // indirect
-	golang.org/x/tools v0.31.0 // indirect
+	go.mongodb.org/mongo-driver v1.17.4 // indirect
+	go.yaml.in/yaml/v2 v2.4.3 // indirect
+	golang.org/x/crypto v0.42.0 // indirect
+	golang.org/x/net v0.44.0 // indirect
+	golang.org/x/sys v0.36.0 // indirect
+	golang.org/x/text v0.29.0 // indirect
+	golang.org/x/tools v0.36.0 // indirect
 	google.golang.org/protobuf v1.36.5 // indirect
 	gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
 	gopkg.in/yaml.v2 v2.4.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
+	sigs.k8s.io/yaml v1.6.0 // indirect
 )
diff --git a/code-engine-cos2cos/go.sum b/code-engine-cos2cos/go.sum
index 1245e5fc..144d8bdc 100644
--- a/code-engine-cos2cos/go.sum
+++ b/code-engine-cos2cos/go.sum
@@ -1,7 +1,11 @@
 github.com/IBM/go-sdk-core/v5 v5.18.5 h1:g0JRl3sYXJczB/yuDlrN6x22LJ6jIxhp0Sa4ARNW60c=
 github.com/IBM/go-sdk-core/v5 v5.18.5/go.mod h1:KonTFRR+8ZSgw5cxBSYo6E4WZoY1+7n1kfHM82VcjFU=
+github.com/IBM/go-sdk-core/v5 v5.21.0 h1:DUnYhvC4SoC8T84rx5omnhY3+xcQg/Whyoa3mDPIMkk=
+github.com/IBM/go-sdk-core/v5 v5.21.0/go.mod h1:Q3BYO6iDA2zweQPDGbNTtqft5tDcEpm6RTuqMlPcvbw=
 github.com/IBM/ibm-cos-sdk-go v1.12.1 h1:pWs5c5/j9PNJE1lIQhYtzpdCxu2fpvCq9PHs6/nDjyI=
 github.com/IBM/ibm-cos-sdk-go v1.12.1/go.mod h1:7vmUThyAq4+AD1eEyGZi90ir06Z9YhsEzLBsdGPfcqo=
+github.com/IBM/ibm-cos-sdk-go v1.12.3 h1:kMIs1nfPY0UXAMcW6bq8O9WOd6KgqiDBnIMd0e/fMqA=
+github.com/IBM/ibm-cos-sdk-go v1.12.3/go.mod h1:dt13UIqJRgfGIlSNlnf17JmAXlBXhfTgXLKV3as8ABk=
 github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
 github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -15,12 +19,18 @@ github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4
 github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
 github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
 github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
+github.com/gabriel-vasile/mimetype v1.4.10 h1:zyueNbySn/z8mJZHLt6IPw0KoZsiQNszIpU+bX4+ZK0=
+github.com/gabriel-vasile/mimetype v1.4.10/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
 github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
 github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
 github.com/go-openapi/errors v0.21.0 h1:FhChC/duCnfoLj1gZ0BgaBmzhJC2SL/sJr8a2vAobSY=
 github.com/go-openapi/errors v0.21.0/go.mod h1:jxNTMUxRCKj65yb/okJGEtahVd7uvWnuWfj53bse4ho=
+github.com/go-openapi/errors v0.22.3 h1:k6Hxa5Jg1TUyZnOwV2Lh81j8ayNw5VVYLvKrp4zFKFs=
+github.com/go-openapi/errors v0.22.3/go.mod h1:+WvbaBBULWCOna//9B9TbLNGSFOfF8lY9dw4hGiEiKQ=
 github.com/go-openapi/strfmt v0.22.1 h1:5Ky8cybT4576C6Ffc+8gYji/wRXCo6Ozm8RaWjPI6jc=
 github.com/go-openapi/strfmt v0.22.1/go.mod h1:OfVoytIXJasDkkGvkb1Cceb3BPyMOwk1FgmyyEw7NYg=
+github.com/go-openapi/strfmt v0.24.0 h1:dDsopqbI3wrrlIzeXRbqMihRNnjzGC+ez4NQaAAJLuc=
+github.com/go-openapi/strfmt v0.24.0/go.mod h1:Lnn1Bk9rZjXxU9VMADbEEOo7D7CDyKGLsSKekhFr7s4=
 github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
 github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
 github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
@@ -29,9 +39,13 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
 github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
 github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
 github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
+github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
+github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
 github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
 github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
 github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
+github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
+github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
 github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
 github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
@@ -54,13 +68,13 @@ github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB1
 github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
 github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
 github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
+github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVUrx/c8Unxc48=
+github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3iS0jrZ7LvzFGFmuKbw=
 github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
 github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
 github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
 github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
 github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
-github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
-github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
@@ -84,10 +98,14 @@ github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
 github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
 github.com/onsi/ginkgo/v2 v2.23.0 h1:FA1xjp8ieYDzlgS5ABTpdUDB7wtngggONc8a7ku2NqQ=
 github.com/onsi/ginkgo/v2 v2.23.0/go.mod h1:zXTP6xIp3U8aVuXN8ENK9IXRaTjFnpVB9mGmaSRvxnM=
+github.com/onsi/ginkgo/v2 v2.23.3 h1:edHxnszytJ4lD9D5Jjc4tiDkPBZ3siDeJJkUZJJVkp0=
+github.com/onsi/ginkgo/v2 v2.23.3/go.mod h1:zXTP6xIp3U8aVuXN8ENK9IXRaTjFnpVB9mGmaSRvxnM=
 github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
 github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
 github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
 github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
+github.com/onsi/gomega v1.37.0 h1:CdEG8g0S133B4OswTDC/5XPSzE1OeP29QOioj2PID2Y=
+github.com/onsi/gomega v1.37.0/go.mod h1:8D9+Txp43QWKhM24yyOBEdpkzN8FvJyAwecBgsU4KU0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -96,14 +114,22 @@ github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/
 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
 github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
 github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
+github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
 github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80=
 go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
+go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw=
+go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
+go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0=
+go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
 golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
+golang.org/x/crypto v0.42.0 h1:chiH31gIWm57EkTXpwnqf8qeuMUi0yekh6mT2AvFlqI=
+golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8=
 golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
@@ -112,6 +138,8 @@ golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/
 golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
 golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
 golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
+golang.org/x/net v0.44.0 h1:evd8IRDyfNBMBTTY5XRF1vaZlD+EmWx6x8PkhR04H/I=
+golang.org/x/net v0.44.0/go.mod h1:ECOoLqd5U3Lhyeyo/QDCEVQ4sNgYsqvCZ722XogGieY=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -127,15 +155,20 @@ golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
 golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
+golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
 golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
+golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
+golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU=
 golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ=
+golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -162,3 +195,5 @@ gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
 gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+sigs.k8s.io/yaml v1.6.0 h1:G8fkbMSAFqgEFgh4b1wmtzDnioxFCUgTZhlbj5P9QYs=
+sigs.k8s.io/yaml v1.6.0/go.mod h1:796bPqUfzR/0jLAl6XjHl3Ck7MiyVv8dbTdyT3/pMf4=