|
| 1 | +{ |
| 2 | + "cells": [ |
| 3 | + { |
| 4 | + "cell_type": "code", |
| 5 | + "execution_count": null, |
| 6 | + "metadata": { |
| 7 | + "vscode": { |
| 8 | + "languageId": "python" |
| 9 | + } |
| 10 | + }, |
| 11 | + "outputs": [], |
| 12 | + "source": [ |
| 13 | + "import pyspark\n", |
| 14 | + "from pyspark.sql import SparkSession\n", |
| 15 | + "import os\n", |
| 16 | + "\n", |
| 17 | + "## DEFINE SENSITIVE VARIABLES\n", |
| 18 | + "NESSIE_SERVER_URI = \"http://nessie:19120/api/v2\"\n", |
| 19 | + "WAREHOUSE_BUCKET = \"s3://warehouse\"\n", |
| 20 | + "# MinIO endpoint — use a hostname or IP that the Spark driver/executors can resolve and reach\n", |
| 21 | + "MINIO_URI = \"http://minio:9000\"\n", |
| 22 | + "\n", |
| 23 | + "\n", |
| 24 | + "## Configurations for Spark Session\n", |
| 25 | + "conf = (\n", |
| 26 | + " pyspark.SparkConf()\n", |
| 27 | + " .setAppName('app_name')\n", |
| 28 | + " # packages\n", |
| 29 | + " .set('spark.jars.packages', 'org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.5.2,org.projectnessie.nessie-integrations:nessie-spark-extensions-3.5_2.12:0.91.3,software.amazon.awssdk:bundle:2.20.131,software.amazon.awssdk:url-connection-client:2.20.131')\n", |
| 30 | + " # SQL extensions\n", |
| 31 | + " .set('spark.sql.extensions', 'org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,org.projectnessie.spark.extensions.NessieSparkSessionExtensions')\n", |
| 32 | + " # catalog configuration\n", |
| 33 | + " .set('spark.sql.catalog.nessie', 'org.apache.iceberg.spark.SparkCatalog')\n", |
| 34 | + " .set('spark.sql.catalog.nessie.uri', NESSIE_SERVER_URI)\n", |
| 35 | + " .set('spark.sql.catalog.nessie.ref', 'main')\n", |
| 36 | + " .set('spark.sql.catalog.nessie.authentication.type', 'NONE')\n", |
| 37 | + " .set('spark.sql.catalog.nessie.catalog-impl', 'org.apache.iceberg.nessie.NessieCatalog')\n", |
| 38 | + " .set('spark.sql.catalog.nessie.s3.endpoint', MINIO_URI)\n", |
| 39 | + " .set('spark.sql.catalog.nessie.warehouse', WAREHOUSE_BUCKET)\n", |
| 40 | + " .set('spark.sql.catalog.nessie.io-impl', 'org.apache.iceberg.aws.s3.S3FileIO')\n", |
| 41 | + " # TODO: '/path/to/custom-ivysettings.xml' is a placeholder — point this at a real ivysettings file or remove the setting\n", |
| 41 | + " .set('spark.jars.ivySettings','/path/to/custom-ivysettings.xml')\n", |
| 42 | + ")\n", |
| 43 | + "\n", |
| 44 | + "## Start Spark Session\n", |
| 45 | + "spark = SparkSession.builder.config(conf=conf).getOrCreate()\n", |
| 46 | + "print(\"Spark Running\")\n", |
| 47 | + "\n", |
| 48 | + "\n", |
| 49 | + "## TEST QUERY TO CHECK IT WORKING\n", |
| 50 | + "### Create TABLE\n", |
| 51 | + "spark.sql(\"CREATE TABLE IF NOT EXISTS nessie.example (name STRING) USING iceberg;\").show()\n", |
| 52 | + "### INSERT INTO TABLE\n", |
| 53 | + "spark.sql(\"INSERT INTO nessie.example VALUES ('Jai Guru!');\").show()\n", |
| 54 | + "### Query Table\n", |
| 55 | + "spark.sql(\"SELECT * FROM nessie.example;\").show()" |
| 56 | + ] |
| 57 | + } |
| 58 | + ], |
| 59 | + "metadata": { |
| 60 | + "language_info": { |
| 61 | + "name": "python" |
| 62 | + } |
| 63 | + }, |
| 64 | + "nbformat": 4, |
| 65 | + "nbformat_minor": 2 |
| 66 | +} |
0 commit comments