diff --git a/.azure-pipelines/_template_build_solution.yml b/.azure-pipelines/_template_build_solution.yml index a511778..d0cfc2f 100644 --- a/.azure-pipelines/_template_build_solution.yml +++ b/.azure-pipelines/_template_build_solution.yml @@ -33,9 +33,9 @@ jobs: - script: python -u automation/scripts/utils_build_parameter_file.py --environments ${{ parameters.environments }} displayName: 'Build parameter files' env: - TENANT_ID: $(SPN_TENANT_ID) - CLIENT_ID: $(SPN_CLIENT_ID) - CLIENT_SECRET: $(SPN_CLIENT_SECRET) + FAB_TENANT_ID: $(SPN_TENANT_ID) + FAB_SPN_CLIENT_ID: $(SPN_CLIENT_ID) + FAB_SPN_CLIENT_SECRET: $(SPN_CLIENT_SECRET) - task: UseDotNet@2 inputs: diff --git a/.azure-pipelines/_template_release_solution.yml b/.azure-pipelines/_template_release_solution.yml index 7aba9b0..1d1457c 100644 --- a/.azure-pipelines/_template_release_solution.yml +++ b/.azure-pipelines/_template_release_solution.yml @@ -45,9 +45,9 @@ jobs: - script: python -u automation/scripts/fabric_release.py --environment ${{ parameters.environment }} --repo_path "$(Pipeline.Workspace)/solution" displayName: 'Run Fabric release script' env: - TENANT_ID: $(SPN_TENANT_ID) - CLIENT_ID: $(SPN_CLIENT_ID) - CLIENT_SECRET: $(SPN_CLIENT_SECRET) + FAB_TENANT_ID: $(SPN_TENANT_ID) + FAB_SPN_CLIENT_ID: $(SPN_CLIENT_ID) + FAB_SPN_CLIENT_SECRET: $(SPN_CLIENT_SECRET) - task: Bash@3 name: generate_connectionstring @@ -64,10 +64,10 @@ jobs: CONNECTION_STRING=$(cat conn.txt) echo "##vso[task.setvariable variable=connection_string]$CONNECTION_STRING" env: - TENANT_ID: $(SPN_TENANT_ID) - CLIENT_ID: $(SPN_CLIENT_ID) - CLIENT_SECRET: $(SPN_CLIENT_SECRET) - + FAB_TENANT_ID: $(SPN_TENANT_ID) + FAB_SPN_CLIENT_ID: $(SPN_CLIENT_ID) + FAB_SPN_CLIENT_SECRET: $(SPN_CLIENT_SECRET) + - task: Bash@3 displayName: 'Install sqlpackage' inputs: diff --git a/.github/workflows/_template_build_solution.yml b/.github/workflows/_template_build_solution.yml index 1806727..f3bc258 100644 --- 
a/.github/workflows/_template_build_solution.yml +++ b/.github/workflows/_template_build_solution.yml @@ -1,13 +1,8 @@ name: _Build Solution on: - workflow_call: - inputs: - environments: - required: true - type: string - description: 'A commaseperated list of environments to build for. E.g., "dev,tst,prd"' - + workflow_call + jobs: build: runs-on: windows-latest @@ -17,10 +12,10 @@ jobs: PYTHONUNBUFFERED: 1 DOTNET_SYSTEM_CONSOLE_ALLOW_ANSI_COLOR_REDIRECTION: true VSO_FORCE_UTF8_OUTPUT: true - TENANT_ID: ${{ secrets.SPN_TENANT_ID }} - CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} - CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} - + FAB_TENANT_ID: ${{ secrets.SPN_TENANT_ID }} + FAB_SPN_CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} + FAB_SPN_CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} + steps: - name: Checkout repository uses: actions/checkout@v4 diff --git a/.github/workflows/_template_release_solution.yml b/.github/workflows/_template_release_solution.yml index 332299d..e645848 100644 --- a/.github/workflows/_template_release_solution.yml +++ b/.github/workflows/_template_release_solution.yml @@ -16,9 +16,9 @@ jobs: PYTHONUNBUFFERED: 1 DOTNET_SYSTEM_CONSOLE_ALLOW_ANSI_COLOR_REDIRECTION: true VSO_FORCE_UTF8_OUTPUT: true - TENANT_ID: ${{ secrets.SPN_TENANT_ID }} - CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} - CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} + FAB_TENANT_ID: ${{ secrets.SPN_TENANT_ID }} + FAB_SPN_CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} + FAB_SPN_CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} steps: @@ -52,8 +52,8 @@ jobs: shell: bash run: | python automation/scripts/generate_connection_string.py \ - --client_id "${CLIENT_ID}" \ - --client_secret "${CLIENT_SECRET}" \ + --client_id "${FAB_SPN_CLIENT_ID}" \ + --client_secret "${FAB_SPN_CLIENT_SECRET}" \ --environment "${{ inputs.environment }}" \ --layer "Core" \ --database "Metadata" \ diff --git a/.github/workflows/feature_fabric_branch.yml b/.github/workflows/feature_fabric_branch.yml index a980868..02754a0 
100644 --- a/.github/workflows/feature_fabric_branch.yml +++ b/.github/workflows/feature_fabric_branch.yml @@ -13,9 +13,9 @@ jobs: PYTHONUNBUFFERED: 1 DOTNET_SYSTEM_CONSOLE_ALLOW_ANSI_COLOR_REDIRECTION: true VSO_FORCE_UTF8_OUTPUT: true - TENANT_ID: ${{ secrets.SPN_TENANT_ID }} - CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} - CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} + FAB_TENANT_ID: ${{ secrets.SPN_TENANT_ID }} + FAB_SPN_CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} + FAB_SPN_CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} steps: - name: Checkout code diff --git a/.github/workflows/feature_fabric_cleanup.yml b/.github/workflows/feature_fabric_cleanup.yml index cca6975..892f127 100644 --- a/.github/workflows/feature_fabric_cleanup.yml +++ b/.github/workflows/feature_fabric_cleanup.yml @@ -16,9 +16,9 @@ jobs: PYTHONUNBUFFERED: 1 DOTNET_SYSTEM_CONSOLE_ALLOW_ANSI_COLOR_REDIRECTION: true VSO_FORCE_UTF8_OUTPUT: true' - TENANT_ID: ${{ secrets.SPN_TENANT_ID }} - CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} - CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} + FAB_TENANT_ID: ${{ secrets.SPN_TENANT_ID }} + FAB_SPN_CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} + FAB_SPN_CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} steps: - name: Checkout code diff --git a/.github/workflows/feature_fabric_update.yml b/.github/workflows/feature_fabric_update.yml index e764a03..4d1cd42 100644 --- a/.github/workflows/feature_fabric_update.yml +++ b/.github/workflows/feature_fabric_update.yml @@ -15,9 +15,9 @@ jobs: PYTHONUNBUFFERED: 1 DOTNET_SYSTEM_CONSOLE_ALLOW_ANSI_COLOR_REDIRECTION: true VSO_FORCE_UTF8_OUTPUT: true - TENANT_ID: ${{ secrets.SPN_TENANT_ID }} - CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} - CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} + FAB_TENANT_ID: ${{ secrets.SPN_TENANT_ID }} + FAB_SPN_CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} + FAB_SPN_CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} if: github.event.created == false steps: diff --git a/.github/workflows/run_fabric_job.yml 
b/.github/workflows/run_fabric_job.yml index ad3038d..f0f74b3 100644 --- a/.github/workflows/run_fabric_job.yml +++ b/.github/workflows/run_fabric_job.yml @@ -14,9 +14,9 @@ jobs: runs-on: ubuntu-latest env: - TENANT_ID: ${{ secrets.SPN_TENANT_ID }} - CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} - CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} + FAB_TENANT_ID: ${{ secrets.SPN_TENANT_ID }} + FAB_SPN_CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} + FAB_SPN_CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} steps: - name: Checkout code diff --git a/.github/workflows/solution_cleanup.yml b/.github/workflows/solution_cleanup.yml index d12639b..a2c3a4c 100644 --- a/.github/workflows/solution_cleanup.yml +++ b/.github/workflows/solution_cleanup.yml @@ -24,9 +24,9 @@ jobs: PYTHONUNBUFFERED: 1 DOTNET_SYSTEM_CONSOLE_ALLOW_ANSI_COLOR_REDIRECTION: true VSO_FORCE_UTF8_OUTPUT: true - TENANT_ID: ${{ secrets.SPN_TENANT_ID }} - CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} - CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} + FAB_TENANT_ID: ${{ secrets.SPN_TENANT_ID }} + FAB_SPN_CLIENT_ID: ${{ secrets.SPN_CLIENT_ID }} + FAB_SPN_CLIENT_SECRET: ${{ secrets.SPN_CLIENT_SECRET }} steps: - name: Checkout diff --git a/automation/scripts/fabric_feature_maintainance.py b/automation/scripts/fabric_feature_maintainance.py index f965f16..545b437 100644 --- a/automation/scripts/fabric_feature_maintainance.py +++ b/automation/scripts/fabric_feature_maintainance.py @@ -6,9 +6,9 @@ # Get arguments parser = argparse.ArgumentParser(description="Fabric feature maintainance arguments") -parser.add_argument("--tenant_id", required=False, default=os.environ.get('TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the TENANT_ID environment variable.") -parser.add_argument("--client_id", required=False, default=os.environ.get('CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. 
Defaults to the CLIENT_ID environment variable.") -parser.add_argument("--client_secret", required=False, default=os.environ.get('CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. Defaults to the CLIENT_SECRET environment variable.") +parser.add_argument("--tenant_id", required=False, default=os.environ.get('FAB_TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the FAB_TENANT_ID environment variable.") +parser.add_argument("--client_id", required=False, default=os.environ.get('FAB_SPN_CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. Defaults to the FAB_SPN_CLIENT_ID environment variable.") +parser.add_argument("--client_secret", required=False, default=os.environ.get('FAB_SPN_CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. Defaults to the FAB_SPN_CLIENT_SECRET environment variable.") parser.add_argument("--branch_name", required=False, default=default_branch_name, help="The name of the Git feature branch to operate on. Used for workspace setup, automation, and CI/CD logic. Defaults to a predefined variable `branch_name`.") parser.add_argument("--action", required=False, default="create", help="Action to perform: `create` to set up a new feature branch and workspace, `update` to synchronize repos and workspaces, `delete` to clean up. Default is `create`.") diff --git a/automation/scripts/fabric_release.py b/automation/scripts/fabric_release.py index 819a9d9..d763b85 100644 --- a/automation/scripts/fabric_release.py +++ b/automation/scripts/fabric_release.py @@ -26,9 +26,9 @@ parser.add_argument("--item_types", required=False, default=default_item_types_in_scope, help="Comma seperated list of item types in scope. 
Must match Fabric ItemTypes exactly.") parser.add_argument("--repo_path", required=False, default=default_solution_path, help="Path the the solution repository where items are stored.") parser.add_argument("--unpublish_items", required=False, default=True, help="Whether to unpublish orphan items that are no longer in the repository. Default is True.") -parser.add_argument("--tenant_id", required=False, default=os.environ.get('TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the TENANT_ID environment variable.") -parser.add_argument("--client_id", required=False, default=os.environ.get('CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. Defaults to the CLIENT_ID environment variable.") -parser.add_argument("--client_secret", required=False, default=os.environ.get('CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. Defaults to the CLIENT_SECRET environment variable.") +parser.add_argument("--tenant_id", required=False, default=os.environ.get('FAB_TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the FAB_TENANT_ID environment variable.") +parser.add_argument("--client_id", required=False, default=os.environ.get('FAB_SPN_CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. Defaults to the FAB_SPN_CLIENT_ID environment variable.") +parser.add_argument("--client_secret", required=False, default=os.environ.get('FAB_SPN_CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. 
Defaults to the FAB_SPN_CLIENT_SECRET environment variable.") args = parser.parse_args() tenant_id = args.tenant_id diff --git a/automation/scripts/fabric_setup.py b/automation/scripts/fabric_setup.py index a9498df..b91d4bd 100644 --- a/automation/scripts/fabric_setup.py +++ b/automation/scripts/fabric_setup.py @@ -18,9 +18,9 @@ parser = argparse.ArgumentParser(description="Fabric IaC setup arguments") parser.add_argument("--environment", required=False, default=default_environment, help="Environment to setup. Default is dev.") parser.add_argument("--action", required=False, default=action, help="Indicates the action to perform (Create/Delete). Default is Create.") -parser.add_argument("--tenant_id", required=False, default=os.environ.get('TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the TENANT_ID environment variable.") -parser.add_argument("--client_id", required=False, default=os.environ.get('CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. Defaults to the CLIENT_ID environment variable.") -parser.add_argument("--client_secret", required=False, default=os.environ.get('CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. Defaults to the CLIENT_SECRET environment variable.") +parser.add_argument("--tenant_id", required=False, default=os.environ.get('FAB_TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the FAB_TENANT_ID environment variable.") +parser.add_argument("--client_id", required=False, default=os.environ.get('FAB_SPN_CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. 
Defaults to the FAB_SPN_CLIENT_ID environment variable.") +parser.add_argument("--client_secret", required=False, default=os.environ.get('FAB_SPN_CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. Defaults to the FAB_SPN_CLIENT_SECRET environment variable.") parser.add_argument("--github_pat", required=False, default=os.environ.get('GITHUB_PAT'), help="Github Personal Access Token. Used when source control provider is GitHub. Defaults to the FAB_GITHUB_PAT environment variable.") args = parser.parse_args() diff --git a/automation/scripts/generate_connection_string.py b/automation/scripts/generate_connection_string.py index a8d8c0b..32503e0 100644 --- a/automation/scripts/generate_connection_string.py +++ b/automation/scripts/generate_connection_string.py @@ -11,9 +11,9 @@ parser.add_argument("--layer", required=True, help="Name of layer to generate connection string for.") parser.add_argument("--database", required=True, help="Name of database to generate connection string for.") parser.add_argument('--output_file', required=True, help="Path to output file where the connection string will be saved.") -parser.add_argument("--tenant_id", required=False, default=os.environ.get('TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the TENANT_ID environment variable.") -parser.add_argument("--client_id", required=False, default=os.environ.get('CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. Defaults to the CLIENT_ID environment variable.") -parser.add_argument("--client_secret", required=False, default=os.environ.get('CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. 
Defaults to the CLIENT_SECRET environment variable.") +parser.add_argument("--tenant_id", required=False, default=os.environ.get('FAB_TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the FAB_TENANT_ID environment variable.") +parser.add_argument("--client_id", required=False, default=os.environ.get('FAB_SPN_CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. Defaults to the FAB_SPN_CLIENT_ID environment variable.") +parser.add_argument("--client_secret", required=False, default=os.environ.get('FAB_SPN_CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. Defaults to the FAB_SPN_CLIENT_SECRET environment variable.") args = parser.parse_args() environment = args.environment diff --git a/automation/scripts/locale_maintain_feature.py b/automation/scripts/locale_maintain_feature.py deleted file mode 100644 index df8fa9c..0000000 --- a/automation/scripts/locale_maintain_feature.py +++ /dev/null @@ -1,33 +0,0 @@ -#--------------------------------------------------------- -# Default values -#--------------------------------------------------------- -branch_name = "feature/MyFeature" # Full name of branch, e.g. "feature/MyFeatureBranch" -action = "create" # Options: create/delete. Defaults to 'create' if not set. 
- -#--------------------------------------------------------- -# Main script -#--------------------------------------------------------- -import subprocess, os -import modules.auth_functions as authfunc - -env_credentials = authfunc.get_environment_credentials("dev", os.path.join(os.path.dirname(__file__), f'../../credentials/')) -script_path = 'automation/scripts/fabric_feature_maintainance.py' - -args = ["--tenant_id", env_credentials.get("tenant_id"), - "--client_id", env_credentials.get("client_id"), - "--client_secret", env_credentials.get("client_secret"), - "--action", action, - "--branch_name", branch_name,] - -process = subprocess.Popen(['python', script_path] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - -# Print the output line by line as it is generated -for line in process.stdout: - print(line, end='') # `end=''` prevents adding extra newlines - -# Optionally, you can also print stderr (errors) as they occur -for line in process.stderr: - print(f"Error: {line}", end='') - -# Wait for the process to complete and get the exit code -process.wait() \ No newline at end of file diff --git a/automation/scripts/utils_build_parameter_file.py b/automation/scripts/utils_build_parameter_file.py index a3ad720..7b1f7a7 100644 --- a/automation/scripts/utils_build_parameter_file.py +++ b/automation/scripts/utils_build_parameter_file.py @@ -13,9 +13,9 @@ parser = argparse.ArgumentParser(description="Fabric IaC setup arguments") parser.add_argument("--environments", required=False, default="dev,tst,prd", help="Comma seperated list of environments to include in parameter file.") parser.add_argument("--source_environment", required=False, default="dev", help="The source environment serving as source for the values being replaced.") -parser.add_argument("--tenant_id", required=False, default=os.environ.get('TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. 
Defaults to the TENANT_ID environment variable.") -parser.add_argument("--client_id", required=False, default=os.environ.get('CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. Defaults to the CLIENT_ID environment variable.") -parser.add_argument("--client_secret", required=False, default=os.environ.get('CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. Defaults to the CLIENT_SECRET environment variable.") +parser.add_argument("--tenant_id", required=False, default=os.environ.get('FAB_TENANT_ID'), help="Azure Active Directory (Microsoft Entra ID) tenant ID used for authenticating with Fabric APIs. Defaults to the FAB_TENANT_ID environment variable.") +parser.add_argument("--client_id", required=False, default=os.environ.get('FAB_SPN_CLIENT_ID'), help="Client ID of the Azure AD application registered for accessing Fabric APIs. Defaults to the FAB_SPN_CLIENT_ID environment variable.") +parser.add_argument("--client_secret", required=False, default=os.environ.get('FAB_SPN_CLIENT_SECRET'), help="Client secret of the Azure AD application registered for accessing Fabric APIs. 
Defaults to the FAB_SPN_CLIENT_SECRET environment variable.") args = parser.parse_args() environments = args.environments.split(",") diff --git a/presentations/ESPC Git Good - Best Practices for CICD - 20251118/Git Good - Best Practices for CICD and Collaboration in Microsoft Fabric (ESPC Webinar).pdf b/presentations/ESPC Git Good - Best Practices for CICD - 20251118/Git Good - Best Practices for CICD and Collaboration in Microsoft Fabric (ESPC Webinar).pdf new file mode 100644 index 0000000..7866bee Binary files /dev/null and b/presentations/ESPC Git Good - Best Practices for CICD - 20251118/Git Good - Best Practices for CICD and Collaboration in Microsoft Fabric (ESPC Webinar).pdf differ diff --git a/presentations/ESPC Git Good - Best Practices for CICD - 20251118/GitGood-QA.md b/presentations/ESPC Git Good - Best Practices for CICD - 20251118/GitGood-QA.md new file mode 100644 index 0000000..8181103 --- /dev/null +++ b/presentations/ESPC Git Good - Best Practices for CICD - 20251118/GitGood-QA.md @@ -0,0 +1,270 @@ +# Git Good - Q&A Follow-Up + +Thanks again to everyone who joined my **Git Good: Best Practices for +CI/CD and Collaboration in Microsoft Fabric** session! + +I really appreciate the engagement and the great questions in the chat - and also your patience when I ran a bit over time 🙏 + +As promised, here is the full Q&A from the session. + +------------------------------------------------------------------------ + +## Q&A + +### Question: +Would you sometimes split out prepare, model and present even further +per area or data product like `prepare_sales_dev` to ensure you can keep +better capacity scaling per data product? + +### Answer: +Not by default. I would always start small. That said, there are +situations where you may need to split the layers further. + +For storage, this could be due to governance policies where each source +must be stored separately in isolated landing lakehouses. 
For the +prepare layer, it could be due to completely separate data platforms +(e.g., using the same ingestion mechanism but different preparation +units). + +However, I would say that **80% of all solutions (if not more)** can be +covered by a simple layered separation. In most cases I see the model +layer being split based on **business unit or domain**. The same +typically goes for reports. + +------------------------------------------------------------------------ + +### Question: +Also curious to the question of Allan. Mostly because we see that up +until the semantic model layer we mostly see a central team working, +while after that it splits up into multiple domains typically. + +### Answer: +Exactly. And it very often also involves different personas. Data +engineers work on ingestion and data preparation, while data analysts +work on the semantic models and reports. Beyond that, report development +typically splits further across areas or departments, each working on +their own set of semantic models. + +------------------------------------------------------------------------ + +### Question: +How can we implement source control for Lakehouse SQL views or other +objects like security and schema in Microsoft Fabric, following best +practices? + +### Answer: +For lakehouses, there is currently **no built-in way** of deploying SQL +endpoint structures (like in a SQL project). Tables and files are also +**not tracked or versioned in Git**. + +So, as far as I know, you must handle this **customly**. There are a few +possible approaches: + +1. **Manage views inside your transformation process (recommended).** + For example, imagine you have a dimension table `dim_calendar` + prepared in a notebook that refreshes daily. As part of that + notebook, include a cell that generates the corresponding view + (e.g., in a `dim` schema such as `dim.Calendar`). 
You can even fully + automate this using: + - a default view pattern, + - metadata-driven view creation (on/off flags), + - a shared helper function that either generates default views or + creates custom SQL-based views. +2. **Use system views to extract and recreate objects.** Another + approach is to use a utility notebook to fetch schemas, views, etc., + from system views (`INFORMATION_SCHEMA.VIEWS`, + `INFORMATION_SCHEMA.SCHEMATA`, ...). Then recreate them in the + target environment. **Be careful**, views may reference tables + that don't yet exist. That's why I prefer to handle view creation + **inside the notebook logic** responsible for the table itself. + +------------------------------------------------------------------------ + +### Question: +I like the recipe approach. Is there a reason why you use this over +Terraform? Any comparisons that you've made? + +### Answer: +Thanks! 🙂 + +My starting point was created when there **was no Terraform provider for +Fabric**. I built wrappers around the REST APIs to handle many different +scenarios, including long-running operations, throttling, etc. When the +Fabric CLI was introduced, migrating was quite easy. + +Today it's more a mix of **policy and skillset**. + +If the question is specifically *"why Fabric CLI + Python scripts over +Terraform?"*, then: + +- **Fabric evolves faster than Terraform**, meaning REST + CLI always + exposes new features first. +- The Fabric CLI allows calling **any REST endpoint**, including + preview and even undocumented ones (with obvious risks). +- Item-level operations are much better supported in a **purpose-built + tool** like the CLI than via Terraform. + +That said, Terraform is still a great choice for **infrastructure +outside Fabric**, such as resource groups, capacities, Key Vault, and +private networking. + +And as a side note, if you're into AI and want to use **MCP +servers**, a CLI is also tailored towards that. 
+ +------------------------------------------------------------------------ + +### Question: +I have two dev, two UAT, two pre-prod, and one prod environment. Since a +workspace can only be attached to one deployment pipeline, I cannot +attach prod to multiple pipelines or sync changes between dev +workspaces. How should I handle this? + +### Answer: +This is difficult to answer without knowing why your environments are +structured that way. If you're referring to **Fabric Deployment +Pipelines**, then I recommend looking into a more scalable, +enterprise-ready deployment pattern using **Azure DevOps pipelines or +GitHub Actions**, deploying via the **fabric-cicd** library. + +`fabric-cicd` enables highly flexible and complex deployment patterns +using a **configuration-based approach**. + +You can learn more here: https://microsoft.github.io/fabric-cicd/latest/ + +------------------------------------------------------------------------ + +### Question: +Are there scenarios where you would NOT use this approach? Where it +would be too big/complex? Or do you see this working for any size of +project/company? + +### Answer: +The short answer is **no** - or at least not if the solution is intended +to become an ongoing, scalable production solution. I would personally +use it even for small solutions. + +The exception is **POCs or experiments**. In those cases, I just do +**ClickOps**. 🙂 + +------------------------------------------------------------------------ + +### Question: +When multiple developers are working on different features +simultaneously, is it advisable to create dedicated developer workspaces +(like sandboxes) for feature development, mirroring dev? + +### Answer: +I would not create dedicated **developer** workspaces, but instead +create **feature** workspaces. This also enables team members to co-work +on a feature. 
+ +------------------------------------------------------------------------ + +### Question: +Can you get feature workspaces back quickly, for example if a bug is +discovered? + +### Answer: +This relates to restoring deleted workspaces (and branches) after a +feature is closed and merged into main. + +Technically, yes. You *can* restore deleted workspaces. But I do +**not** consider that best practice. + +If a feature has been closed, approved, and merged into main, I would +always create a **new feature/bugfix branch + workspace** for the fix. + +It also depends on your branching strategy. In my session I mentioned +three scenarios. In scenario 2 (cherry-picking) and scenario 3 +(octopus merge), you almost never need to restore workspaces because +nothing moves into main (and into production) before it is fully tested +and signed off. + +------------------------------------------------------------------------ + +### Question: +How do you handle deploying connection IDs between environments? + +### Answer: +`fabric-cicd` is your lifesaver here. + +Using **parameterization** with dynamic replacement of item IDs, you can +completely control dependency references, such as switching +connection IDs between development, test, and production. + +In my current GitHub repo I don't use dynamic replacements but instead +generate the `parameter.yml` file automatically for each deployment, +ensuring all workspace IDs, connection IDs, etc., are correctly mapped +across environments. + +------------------------------------------------------------------------ + +### Question: +Is it possible to duplicate data of selected Lakehouses/Warehouses when +creating a feature workspace? + +### Answer: +I would generally *not* recommend having feature- or developer-specific +storage layers. This is a tricky subject in any data platform - not +just Fabric. + +Fabric's mix of Lakehouses, Warehouses, and shortcuts makes this even +more complex. 
+ +To answer the question: **Yes, it is technically possible to clone +data**, but it requires custom implementation and comes with challenges +around retention, object selection, and cost/consumption. + +For Warehouses, you can create **zero-copy table clones**. For +Lakehouses, you can use **shortcuts**, or clone data manually if needed. + +------------------------------------------------------------------------ + +### Question: +How does the database metadata deployment/release process work if you're +using metadata-driven ETL (e.g., based on Azure SQL DB/Fabric DB)? + +### Answer: +If your metadata should reside in a relational store, I would always +choose a **Fabric SQL Database** for a Fabric solution. + +Using Azure SQL DB: - adds extra cost, - introduces unnecessary +complexity, - often requires VNET/network security policies, - may +require managed private endpoints for Fabric to communicate with Azure +SQL DB. + +Note: trial capacities are limited to **3 Fabric SQL Databases**, but +there is no limit in paid capacities. + +I am also exploring a **file-based metadata** approach using Data +Contracts (YAML), which offers better version-control support - but +this is still experimental. + +------------------------------------------------------------------------ + +### Question: +How to set up the dynamic connection for a semantic model so the model +in DEV refers to the DEV lakehouse, and the model in TEST/PROD refers to +their respective lakehouses? + +### Answer: +This is handled by **fabric-cicd** during deployments to test and +production. + +In development, your semantic models use a development connection. The +ID of this connection can be added manually, or as in my solution, +added automatically together with the corresponding IDs for test and +production. + +During deployment, the semantic model definition is updated with the +connection ID of the target environment. 
+ +Another option is using **Tabular Editor 2 CLI** to update the +connection during deployment. + +I will be hosting a **Tabular Editor webinar on December 10th**, focused +specifically on CI/CD for semantic models (with some overlap with my Git +Good session). + +You can sign up here: +https://tabulareditor.com/resources/upcoming-and-on-demand-events/ci-cd-for-microsoft-fabric-and-power-bi diff --git a/presentations/ESPC Git Good - Best Practices for CICD - 20251118/README.md b/presentations/ESPC Git Good - Best Practices for CICD - 20251118/README.md new file mode 100644 index 0000000..da66d81 --- /dev/null +++ b/presentations/ESPC Git Good - Best Practices for CICD - 20251118/README.md @@ -0,0 +1,40 @@ +# ESPC Webinar: Git Good - Best Practices for CI/CD and Collaboration in Microsoft Fabric + +### Session Follow‑Up, Q&A, and Resources + +This folder contains the follow-up material from my session **"Git +Good: Best Practices for CI/CD and Collaboration in Microsoft Fabric."** from the 18th of November 2025. + +The session covered: +- Workspace and repository organization +- Infrastructure setup using Python and the Fabric CLI +- Git integration strategies in Microsoft Fabric +- Branching patterns for collaborative development +- CI/CD automation using **fabric-cicd** +- Deployment approaches, environment alignment, and best practices +- Real-world lessons learned from building Fabric data platforms + +A lot of excellent questions came in during the session - more than we had time to address live. + +To make sure nothing was lost, I collected **all questions + full answers** into a dedicated Q&A document. + +**[GitGood Q&A](GitGood-QA.md)** : A complete list of attendee questions with clarified and extended answers. 
+ +------------------------------------------------------------------------ + +## 🔗 Additional Resources +- The presentation from the webinar: [View it here](Git%20Good%20-%20Best%20Practices%20for%20CICD%20and%20Collaboration%20in%20Microsoft%20Fabric%20(ESPC%20Webinar).pdf) +- Webinar recording: [View the recording here](https://www.sharepointeurope.com/webinars/git-good-best-practices-for-ci-cd-and-collaboration-in-microsoft-fabric/) +- My Blog: [peerinsights.emono.dk](https://peerinsights.emono.dk/) +- Fabric CLI: [microsoft.github.io/fabric-cli](https://microsoft.github.io/fabric-cli/) +- Fabric CI/CD Library: [microsoft.github.io/fabric-cicd](https://microsoft.github.io/fabric-cicd/latest/) +------------------------------------------------------------------------ + +## 🙌 Thanks + +A huge thank you to everyone who attended the session, asked questions, +and contributed to the discussion.\ +If you want to explore any topic further, feel free to connect or open +an issue/discussion here in the repository. + +Happy building - and Git Good! 🚀 \ No newline at end of file