<?xml version="1.0" encoding="UTF-8"?>
<source>
  <jobs>
    <job>
      <externalid>6547cb19-d1d</externalid>
      <Title>FBS Analytics Engineer</Title>
      <Description><![CDATA[<p>Our Client is seeking an experienced FBS Analytics Engineer to join their Technical Solutions Team in Business Insurance Analytics. The successful candidate will assist in the full-stack development of tools used by product managers, actuaries, and others to price insurance products.</p>
<p>Key responsibilities include:</p>
<ul>
<li>Assisting in the full-stack development of tools using Node.js, Angular, SQL queries and procedures, and Python</li>
<li>Validating data and calculations</li>
<li>Collaborating with cross-functional teams to deliver high-quality solutions</li>
</ul>
<p>To be successful in this role, you will need:</p>
<ul>
<li>Over 4 years of experience in software and data development using Python and SQL</li>
<li>Proficiency in Node.js, Angular, SQL queries and procedures, and Python</li>
<li>Excellent problem-solving skills and attention to detail</li>
<li>Strong communication and collaboration skills</li>
</ul>
<p>In return, you can expect a competitive compensation and benefits package, including a comprehensive benefits package, career development and training opportunities, flexible work arrangements, and a dynamic and inclusive work culture.</p>
<p>If you are a motivated and experienced professional looking to join a leading organisation in the insurance industry, please apply now.</p>
<p style="margin-top:24px;font-size:13px;color:#666;">XML job scraping automation by <a href="https://yubhub.co">YubHub</a></p>]]></Description>
      <Jobtype>full-time</Jobtype>
      <Experiencelevel>senior</Experiencelevel>
      <Workarrangement>hybrid</Workarrangement>
      <Salaryrange></Salaryrange>
      <Skills>SQL, Python, Node.js, Angular, Excel, Data Transformation, Data Quality Assurance, Pipeline Design and Development, Technical Communication</Skills>
      <Category>Engineering</Category>
      <Industry>Finance</Industry>
      <Employername>Capgemini</Employername>
      <Employerlogo>https://logos.yubhub.co/capgemini.com.png</Employerlogo>
      <Employerdescription>One of the United States&apos; largest insurers, providing a wide range of insurance and financial services products with gross written premiums well over US$25 Billion.</Employerdescription>
      <Employerwebsite>https://www.capgemini.com/us-en/about-us/who-we-are/</Employerwebsite>
      <Compensationcurrency></Compensationcurrency>
      <Compensationmin></Compensationmin>
      <Compensationmax></Compensationmax>
      <Applyto>https://jobs.workable.com/view/tjTcVqzWc1gHAc5C9S4gy1/hybrid-fbs-analytics-engineer-in-hyderabad-at-capgemini</Applyto>
      <Location>Hyderabad</Location>
      <Country></Country>
      <Postedate>2026-04-24</Postedate>
    </job>
    <job>
      <externalid>b8eed0eb-9f7</externalid>
      <Title>General Manager - Belgium</Title>
      <Description><![CDATA[<p>We are looking for an experienced General Manager who has successfully built and led sales and technical teams. The GM will be responsible for the overall country strategy and deliverables, in line with the company priorities. We are looking for an execution-focused operator who can propel ElevenLabs in the country.</p>
<p>Key responsibilities include:</p>
<ul>
<li>Developing and executing our Country strategy, including sales, customer delivery, KPI setting and hiring</li>
<li>Designing the optimal pipeline to reach our ambitious sales targets in the B2B side</li>
<li>Hiring the founding country team responsible for sales and implementations</li>
<li>Defining event priorities and where to invest for brand awareness</li>
<li>Representing ElevenLabs in events and media appearances</li>
<li>Working with the partner ecosystem to generate new collaborations and sales opportunities, in partnership with the global BD team</li>
<li>Ideating, owning and driving big bets in the country</li>
</ul>
<p>Requirements include:</p>
<ul>
<li>15+ years of experience leading teams and highly complex projects</li>
<li>Background in consulting and sales</li>
<li>Deep expertise in deploying technical products and/or solutions to enterprises</li>
<li>Owner mentality. Hands-on and self-starter who is keen to get their hands dirty to produce results</li>
<li>Outstanding communication and interpersonal skills</li>
<li>A hands-on leader who thinks outside the box and wants to generate results</li>
<li>Proven ability to build and scale high-performing teams</li>
<li>Native speaker of the local language</li>
</ul>
<p style="margin-top:24px;font-size:13px;color:#666;">XML job scraping automation by <a href="https://yubhub.co">YubHub</a></p>]]></Description>
      <Jobtype>full-time</Jobtype>
      <Experiencelevel>executive</Experiencelevel>
      <Workarrangement>remote</Workarrangement>
      <Salaryrange></Salaryrange>
      <Skills>AI, sales, technical teams, country strategy, KPI setting, hiring, pipeline design, brand awareness, partner ecosystem</Skills>
      <Category>Sales</Category>
      <Industry>Technology</Industry>
      <Employername>ElevenLabs</Employername>
      <Employerlogo>https://logos.yubhub.co/elevenlabs.io.png</Employerlogo>
      <Employerdescription>ElevenLabs is an AI research and product company transforming how we interact with technology. It serves millions of users and thousands of businesses, including fast-growing startups and large enterprises.</Employerdescription>
      <Employerwebsite>https://elevenlabs.io</Employerwebsite>
      <Compensationcurrency></Compensationcurrency>
      <Compensationmin></Compensationmin>
      <Compensationmax></Compensationmax>
      <Applyto>https://elevenlabs.io/careers/088d1dfb-4685-461a-b8b1-3f9681f4af12/general-manager-belgium</Applyto>
      <Location>Belgium</Location>
      <Country></Country>
      <Postedate>2026-04-24</Postedate>
    </job>
    <job>
      <externalid>76f61aca-ede</externalid>
      <Title>Software Engineer, Human Data Interface</Title>
      <Description><![CDATA[<p><strong>About the Role</strong></p>
<p>As a Software Engineer on Anthropic&#39;s Human Data Interfaces team, you&#39;ll own the architecture and execution of our data collection pipelines, designing systems that are both performant at scale and resilient to the rapidly changing needs of our research teams.</p>
<p><strong>Responsibilities</strong></p>
<ul>
<li>Architect and build data collection pipelines that support rapid iteration, balancing data quality and system maintainability</li>
<li>Think deeply about the experience of the crowdworkers and vendors using these systems, building interfaces that are clear, efficient, and lead to high-quality data</li>
<li>Collaborate closely with research teams to understand evolving data needs and iterate quickly on collection methods</li>
<li>Partner with our Human Data Operations team to understand the end-to-end data workflow and design interfaces that make their jobs easier</li>
<li>Prioritize and juggle multiple workstreams, making trade-off decisions in a fast-moving environment where research priorities can shift quickly</li>
</ul>
<p><strong>You May Be a Good Fit If You:</strong></p>
<ul>
<li>Believe that advanced AI systems could have a transformative effect on the world and are interested in helping make sure that transformation goes well</li>
<li>Are a strong full-stack engineer with broad experience across the stack</li>
<li>Are very good at building internal tools, including working with users of the tools to understand their needs</li>
<li>Thrive in fast-moving environments where you need to balance speed of iteration with long-term system health</li>
<li>Are a quick study — this team sits at the intersection of a large number of different complex technical systems that you&#39;ll need to understand (at a high level) to be effective</li>
</ul>
<p><strong>Strong Candidates May Also Have:</strong></p>
<ul>
<li>Experience building human data labelling interfaces, human-in-the-loop systems, or data collection pipelines</li>
<li>Familiarity with how preference data and reward models are used in AI model training</li>
<li>Experience working with researchers who are internal users/customers</li>
<li>Background in building, and improving the user-experience of user-facing applications, particularly those involving complex UI interactions or annotation workflows</li>
<li>Strong instincts around system design — building things that evolve gracefully as requirements change</li>
<li>Experience influencing technical and product direction on a team</li>
</ul>
<p><strong>Logistics</strong></p>
<ul>
<li>Minimum education: Bachelor’s degree or an equivalent combination of education, training, and/or experience</li>
<li>Required field of study: A field relevant to the role as demonstrated through coursework, training, or professional experience</li>
<li>Minimum years of experience: Years of experience required will correlate with the internal job level requirements for the position</li>
<li>Location-based hybrid policy: Currently, we expect all staff to be in one of our offices at least 25% of the time. However, some roles may require more time in our offices.</li>
<li>Visa sponsorship: We do sponsor visas! However, we aren&#39;t able to successfully sponsor visas for every role and every candidate. But if we make you an offer, we will make every reasonable effort to get you a visa, and we retain an immigration lawyer to help with this.</li>
</ul>
<p><strong>Benefits</strong></p>
<ul>
<li>Competitive compensation and benefits</li>
<li>Optional equity donation matching</li>
<li>Generous vacation and parental leave</li>
<li>Flexible working hours</li>
<li>Lovely office space in which to collaborate with colleagues</li>
</ul>
<p><strong>How to Apply</strong></p>
<p>If you&#39;re interested in this role, please submit your application through our website. We look forward to hearing from you!</p>
<p style="margin-top:24px;font-size:13px;color:#666;">XML job scraping automation by <a href="https://yubhub.co">YubHub</a></p>]]></Description>
      <Jobtype>full-time</Jobtype>
      <Experiencelevel>mid</Experiencelevel>
      <Workarrangement>hybrid</Workarrangement>
      <Salaryrange>$320,000-$405,000 USD</Salaryrange>
      <Skills>full-stack engineering, data collection pipeline design, human data labelling interfaces, human-in-the-loop systems, data collection pipelines, preference data and reward models, AI model training, researcher collaboration, user experience design, system design</Skills>
      <Category>Engineering</Category>
      <Industry>Technology</Industry>
      <Employername>Anthropic</Employername>
      <Employerlogo>https://logos.yubhub.co/anthropic.com.png</Employerlogo>
      <Employerdescription>Anthropic is a public benefit corporation that creates reliable, interpretable, and steerable AI systems.</Employerdescription>
      <Employerwebsite>https://www.anthropic.com/</Employerwebsite>
      <Compensationcurrency>USD</Compensationcurrency>
      <Compensationmin>320000</Compensationmin>
      <Compensationmax>405000</Compensationmax>
      <Applyto>https://job-boards.greenhouse.io/anthropic/jobs/5109273008</Applyto>
      <Location>San Francisco, CA | New York City, NY</Location>
      <Country></Country>
      <Postedate>2026-04-18</Postedate>
    </job>
    <job>
      <externalid>1b773e5c-b51</externalid>
      <Title>IT Systems Engineer, Corporate Systems &amp; Infrastructure</Title>
      <Description><![CDATA[<p><strong>About the role</strong></p>
<p>The Corporate Infrastructure team builds and operates the platform layer the rest of IT Engineering runs on — cloud infrastructure hosting our internal services, the CI/CD that ships IT&#39;s own code, the observability stack across the corporate environment, and the cross-system automation that wires together tools never designed to talk to each other.</p>
<p>You&#39;ll build deployment pipelines and internal tooling that let IT Engineering ship like a product team. You&#39;ll define SLOs for corporate services, build the monitoring to know when we&#39;re missing them, and run on-call for the things you deploy. You&#39;ll partner with our network and AV engineers as their infrastructure counterpart — automating physical-world systems, building the telemetry that tells us an office is degraded before someone files a ticket. The scope is broad and the team is deliberately small, which means you&#39;ll need depth across cloud, CI, and observability, strong judgment about where to invest, and a bias toward infrastructure-as-code over heroic manual fixes.</p>
<p><strong>Responsibilities</strong></p>
<ul>
<li>Build and operate the cloud infrastructure that hosts IT&#39;s internal services</li>
<li>Design CI/CD pipelines that let IT Engineering ship through code review and automated testing</li>
<li>Own observability for corporate infrastructure — monitoring, alerting, dashboards, and SLOs</li>
<li>Write cross-system automation to integrate third-party systems and internal services</li>
<li>Partner with network, audiovisual, and physical security to deliver robust infrastructure solutions</li>
<li>Build internal tools — CLIs, bots, dashboards — that make other IT engineers faster</li>
<li>Run on-call for corporate infrastructure with post-incident reviews that drive durable fixes</li>
<li>Deploy infrastructure as code</li>
</ul>
<p><strong>Requirements</strong></p>
<ul>
<li>8+ years building secure IT systems in complex environments</li>
<li>Excel at solving ambiguous problems with multiple stakeholders</li>
<li>Communicate technical concepts clearly to any audience</li>
<li>View IT Engineering as requiring product engineering rigor</li>
<li>Successfully deliver complex projects from conception to production</li>
<li>Write clear documentation as a natural part of your workflow</li>
<li>Have shipped Infrastructure as Code in production — Terraform or similar — with modules and state you maintained</li>
<li>Have run services with SLOs, on-call rotations, and post-incident reviews</li>
<li>Have built internal platforms or tooling that other engineers depend on</li>
</ul>
<p><strong>Strong candidates may also</strong></p>
<ul>
<li>Have transformed traditional IT operations into engineering-driven organizations</li>
<li>Have built strong partnerships with Security and Engineering teams</li>
<li>Practice modern development methods (code reviews, testing, CI/CD)</li>
<li>Work effectively in distributed teams</li>
<li>Have experience with ECS, Kubernetes or other container orchestration for internal services</li>
<li>Have automated physical-world infrastructure deployment (e.g., network configuration, office technology, physical security systems)</li>
<li>Have worked with enterprise integration or workflow automation platforms (e.g., Workato, n8n, Tines, or equivalents)</li>
</ul>
<p><strong>Technical Skills</strong></p>
<ul>
<li>Python, golang, etc</li>
<li>Terraform and Infrastructure as Code</li>
<li>Cloud platforms (AWS, GCP, Azure)</li>
<li>CI/CD pipeline design</li>
<li>Observability tooling (e.g., Prometheus, Grafana, Datadog, Honeycomb, or equivalent)</li>
<li>Linux systems administration</li>
<li>Strong networking skills</li>
<li>Configuration management</li>
</ul>
<p><strong>Experience Level:</strong> senior<br />
<strong>Employment Type:</strong> full-time<br />
<strong>Workplace Type:</strong> remote<br />
<strong>Category:</strong> Engineering<br />
<strong>Industry:</strong> Technology<br />
<strong>Salary Range:</strong> $275,000-$325,000 USD</p>
<p><strong>Required Skills:</strong></p>
<ul>
<li>Python</li>
<li>Terraform</li>
<li>Cloud platforms</li>
<li>CI/CD pipeline design</li>
<li>Observability tooling</li>
<li>Linux systems administration</li>
<li>Strong networking skills</li>
<li>Configuration management</li>
</ul>
<p>Preferred Skills:</p>
<ul>
<li>golang</li>
<li>ECS</li>
<li>Kubernetes</li>
<li>Enterprise integration or workflow automation platforms</li>
</ul>
<p style="margin-top:24px;font-size:13px;color:#666;">XML job scraping automation by <a href="https://yubhub.co">YubHub</a></p>]]></Description>
      <Jobtype>full-time</Jobtype>
      <Experiencelevel>senior</Experiencelevel>
      <Workarrangement>remote</Workarrangement>
      <Salaryrange>$275,000-$325,000 USD</Salaryrange>
      <Skills>Python, Terraform, Cloud platforms, CI/CD pipeline design, Observability tooling, Linux systems administration, Strong networking skills, Configuration management, golang, ECS, Kubernetes, Enterprise integration or workflow automation platforms</Skills>
      <Category>Engineering</Category>
      <Industry>Technology</Industry>
      <Employername>Anthropic</Employername>
      <Employerlogo>https://logos.yubhub.co/anthropic.com.png</Employerlogo>
      <Employerdescription>Anthropic is a public benefit corporation that aims to create reliable, interpretable, and steerable AI systems.</Employerdescription>
      <Employerwebsite>https://www.anthropic.com/</Employerwebsite>
      <Compensationcurrency>USD</Compensationcurrency>
      <Compensationmin>275000</Compensationmin>
      <Compensationmax>325000</Compensationmax>
      <Applyto>https://job-boards.greenhouse.io/anthropic/jobs/4887952008</Applyto>
      <Location>Remote-Friendly (Travel-Required) | San Francisco, CA | Seattle, WA | New York City, NY</Location>
      <Country></Country>
      <Postedate>2026-04-18</Postedate>
    </job>
    <job>
      <externalid>65e7bd92-c31</externalid>
      <Title>FBS Analytics Engineer</Title>
      <Description><![CDATA[<p>FBS – Farmer Business Services is part of Farmers operations with the purpose of building a global approach to identifying, recruiting, hiring, and retaining top talent. By combining international reach with US expertise, we build diverse and high-performing teams that are equipped to thrive in today’s competitive marketplace.</p>
<p>We believe that the foundation of every successful business lies in having the right people with the right skills. That is where we come in—helping Farmers build a winning team that delivers consistent and sustainable results.</p>
<p>Since we don’t have a local legal entity, we’ve partnered with Capgemini, which acts as the Employer of Record. Capgemini is responsible for managing local payroll and benefits.</p>
<p><strong>What to expect on your journey with us:</strong></p>
<ul>
<li>A solid and innovative company with a strong market presence</li>
<li>A dynamic, diverse, and multicultural work environment</li>
<li>Leaders with deep market knowledge and strategic vision</li>
<li>Continuous learning and development</li>
</ul>
<p><strong>Team Function</strong></p>
<p>The Direct modeling team is focused on creating models to guide enterprise marketing decisions that will help to promote brand awareness as well as boost sales through the direct channel.</p>
<p><strong>Role Description:</strong></p>
<p>This position plays a crucial role in the data ecosystem by iteratively transforming raw data into structured, high-quality datasets that are ready for analysis in partnership with data/decision scientists. The role primarily focuses on moderately complex business problems while receiving limited coaching and guidance from data leadership. The role combines the technical skills of a data engineer, the analytical mindset of a data analyst, and strong business acumen to ensure data is not only collected and stored efficiently but also made accessible and insightful for end users. In partnership with data/decision scientists, the position is responsible for end-to-end data workflow including data ingestion, transformation, modeling, and validation to enable data-driven decision-making across the organization. This position requires deep understanding of data engineering, business processes, and analytics principles as well as a proactive approach to solving complex data challenges.</p>
<p><strong>Essential Job Functions:</strong></p>
<p><strong>1) Data infrastructure development</strong>: Pipeline Design and Development; Architects and builds scalable data pipelines using modern ETL (Extract, Load, Transform) tools and frameworks such as dbt (Data Build Tool), Apache Airflow, or similar. Automates data ingestion processes from various sources including databases, APIs, and third party services. Data Storage and Management - Designs and implements data warehousing solutions using platforms like Snowflake, Redshift, or BigQuery. Optimizes storage solutions for performance, cost efficiency, and scalability.</p>
<p><strong>2) Data modeling and transformation:</strong> Data Modeling - Develops and maintains logical and physical data models to support business analytics. Creates and manages dimensional models, star/snowflake schemas, and other data structures. Data Transformation - Transforms raw data into clean, organized, and analytics-ready datasets using SQL, Python, or other relevant languages. Implements data transformation workflows to handle data cleansing, normalization, and enrichment. Data Quality Assurance - Conducts data validation and consistency checks to ensure the accuracy and reliability of data. Implements data quality monitoring and alerting mechanisms.</p>
<p><strong>3) Collaboration and stakeholder management:</strong> Cross-Functional Collaboration - Works closely with data analysts, data scientists, and business stakeholders to gather requirements and understand their data needs. Acts as a liaison between technical teams and business units to translate business requirements into technical specifications. Technical Communication - Clearly communicates complex technical concepts and data insights to non-technical stakeholders. Provides training and support to team members on data tools, best practices, and methodologies.</p>
<p><strong>Requirements</strong></p>
<ul>
<li>Over 4 years of experience in data development and analytics engineering using Python, SQL, DBT and Snowflake.</li>
<li>Bachelor’s degree in Computer Science, Data Science, Engineering or other Math or Technology related degrees.</li>
<li>Fluency in English</li>
</ul>
<p><strong>Software / Tools</strong></p>
<ul>
<li>SQL (must have)</li>
<li>Python (must have)</li>
<li>Snowflake (must have)</li>
<li>DBT (must have)</li>
</ul>
<p><strong>Other Critical Skills</strong></p>
<ul>
<li>Data Transformation</li>
<li>Data Quality Assurance</li>
<li>Pipeline Design and Development</li>
<li>Technical Communication</li>
<li>Independent work</li>
<li>Orientation to detail</li>
</ul>
<p><strong>Benefits</strong></p>
<p>This position comes with a competitive compensation and benefits package.</p>
<ul>
<li>A competitive salary and performance-based bonuses.</li>
<li>Comprehensive benefits package.</li>
<li>Flexible work arrangements (remote and/or office-based).</li>
<li>You will also enjoy a dynamic and inclusive work culture within a globally renowned group.</li>
<li>Private Health Insurance.</li>
<li>Paid Time Off.</li>
<li>Training &amp; Development opportunities in partnership with renowned companies.</li>
</ul>
<p style="margin-top:24px;font-size:13px;color:#666;">XML job scraping automation by <a href="https://yubhub.co">YubHub</a></p>]]></Description>
      <Jobtype>full-time</Jobtype>
      <Experiencelevel>senior</Experiencelevel>
      <Workarrangement>remote</Workarrangement>
      <Salaryrange></Salaryrange>
      <Skills>SQL, Python, Snowflake, DBT, Data Transformation, Data Quality Assurance, Pipeline Design and Development, Technical Communication, Independent work, Orientation to detail</Skills>
      <Category>Engineering</Category>
      <Industry>Technology</Industry>
      <Employername>Capgemini</Employername>
      <Employerlogo>https://logos.yubhub.co/capgemini.com.png</Employerlogo>
      <Employerdescription>Capgemini is a global technology consulting and professional services company that provides a range of services including technology consulting, application services, and business process outsourcing.</Employerdescription>
      <Employerwebsite>https://jobs.workable.com</Employerwebsite>
      <Compensationcurrency></Compensationcurrency>
      <Compensationmin></Compensationmin>
      <Compensationmax></Compensationmax>
      <Applyto>https://jobs.workable.com/view/ws76jLTZQ1JKbCcs3CUiC4/remote-fbs-analytics-engineer-in-brazil-at-capgemini</Applyto>
      <Location>Brazil</Location>
      <Country></Country>
      <Postedate>2026-03-09</Postedate>
    </job>
    <job>
      <externalid>5aabf454-ae0</externalid>
      <Title>FBS Analytics Engineer</Title>
      <Description><![CDATA[<p>FBS – Farmer Business Services is part of Farmers operations with the purpose of building a global approach to identifying, recruiting, hiring, and retaining top talent. By combining international reach with US expertise, we build diverse and high-performing teams that are equipped to thrive in today’s competitive marketplace.</p>
<p>We believe that the foundation of every successful business lies in having the right people with the right skills. That is where we come in—helping Farmers build a winning team that delivers consistent and sustainable results.</p>
<p>Since we don’t have a local legal entity, we’ve partnered with Capgemini, which acts as the Employer of Record. Capgemini is responsible for managing local payroll and benefits.</p>
<p><strong>What to expect on your journey with us:</strong></p>
<ul>
<li>A solid and innovative company with a strong market presence</li>
<li>A dynamic, diverse, and multicultural work environment</li>
<li>Leaders with deep market knowledge and strategic vision</li>
<li>Continuous learning and development</li>
</ul>
<p><strong>Team Function</strong></p>
<p>The Direct modeling team is focused on creating models to guide enterprise marketing decisions that will help to promote brand awareness as well as boost sales through the direct channel.</p>
<p><strong>Role Description:</strong></p>
<p>This position plays a crucial role in the data ecosystem by iteratively transforming raw data into structured, high-quality datasets that are ready for analysis in partnership with data/decision scientists. The role primarily focuses on moderately complex business problems while receiving limited coaching and guidance from data leadership. The role combines the technical skills of a data engineer, the analytical mindset of a data analyst, and strong business acumen to ensure data is not only collected and stored efficiently but also made accessible and insightful for end users. In partnership with data/decision scientists, the position is responsible for end-to-end data workflow including data ingestion, transformation, modeling, and validation to enable data-driven decision-making across the organization. This position requires deep understanding of data engineering, business processes, and analytics principles as well as a proactive approach to solving complex data challenges.</p>
<p><strong>Essential Job Functions:</strong></p>
<p><strong>1) Data infrastructure development</strong>: Pipeline Design and Development; Architects and builds scalable data pipelines using modern ETL (Extract, Load, Transform) tools and frameworks such as dbt (Data Build Tool), Apache Airflow, or similar. Automates data ingestion processes from various sources including databases, APIs, and third party services. Data Storage and Management - Designs and implements data warehousing solutions using platforms like Snowflake, Redshift, or BigQuery. Optimizes storage solutions for performance, cost efficiency, and scalability.</p>
<p><strong>2) Data modeling and transformation:</strong> Data Modeling - Develops and maintains logical and physical data models to support business analytics. Creates and manages dimensional models, star/snowflake schemas, and other data structures. Data Transformation - Transforms raw data into clean, organized, and analytics-ready datasets using SQL, Python, or other relevant languages. Implements data transformation workflows to handle data cleansing, normalization, and enrichment. Data Quality Assurance - Conducts data validation and consistency checks to ensure the accuracy and reliability of data. Implements data quality monitoring and alerting mechanisms.</p>
<p><strong>3) Collaboration and stakeholder management:</strong> Cross-Functional Collaboration - Works closely with data analysts, data scientists, and business stakeholders to gather requirements and understand their data needs. Acts as a liaison between technical teams and business units to translate business requirements into technical specifications. Technical Communication - Clearly communicates complex technical concepts and data insights to non-technical stakeholders. Provides training and support to team members on data tools, best practices, and methodologies.</p>
<p><strong>Requirements</strong></p>
<ul>
<li>Over 4 years of experience in data development and analytics engineering using Python, SQL, DBT and Snowflake.</li>
<li>Bachelor’s degree in Computer Science, Data Science, Engineering or other Math or Technology related degrees.</li>
<li>Fluency in English</li>
</ul>
<p><strong>Software / Tools</strong></p>
<ul>
<li>SQL (must have)</li>
<li>Python (must have)</li>
<li>Snowflake (must have)</li>
<li>DBT (must have)</li>
</ul>
<p><strong>Other Critical Skills</strong></p>
<ul>
<li>Data Transformation</li>
<li>Data Quality Assurance</li>
<li>Pipeline Design and Development</li>
<li>Technical Communication</li>
<li>Independent work</li>
<li>Orientation to detail</li>
</ul>
<p><strong>Benefits</strong></p>
<p>This position comes with a competitive compensation and benefits package.</p>
<ul>
<li>A competitive salary and performance-based bonuses.</li>
<li>Comprehensive benefits package.</li>
<li>Flexible work arrangements (remote and/or office-based).</li>
<li>You will also enjoy a dynamic and inclusive work culture within a globally renowned group.</li>
<li>Private Health Insurance.</li>
<li>Paid Time Off.</li>
<li>Training &amp; Development opportunities in partnership with renowned companies.</li>
</ul>
<p style="margin-top:24px;font-size:13px;color:#666;">XML job scraping automation by <a href="https://yubhub.co">YubHub</a></p>]]></Description>
      <Jobtype>full-time</Jobtype>
      <Experiencelevel>mid</Experiencelevel>
      <Workarrangement>remote</Workarrangement>
      <Salaryrange></Salaryrange>
      <Skills>SQL, Python, Snowflake, DBT, Data Transformation, Data Quality Assurance, Pipeline Design and Development, Technical Communication, Independent work, Orientation to detail</Skills>
      <Category>Engineering</Category>
      <Industry>Technology</Industry>
      <Employername>Capgemini</Employername>
      <Employerlogo>https://logos.yubhub.co/capgemini.com.png</Employerlogo>
      <Employerdescription>Capgemini is a global technology consulting and professional services company with a diverse collective of nearly 350,000 strategic and technological experts across more than 50 countries.</Employerdescription>
      <Employerwebsite>https://jobs.workable.com</Employerwebsite>
      <Compensationcurrency></Compensationcurrency>
      <Compensationmin></Compensationmin>
      <Compensationmax></Compensationmax>
      <Applyto>https://jobs.workable.com/view/htNwC3gPnBQ9oxedafiBav/remote-fbs-analytics-engineer-in-mexico-at-capgemini</Applyto>
      <Location>Mexico</Location>
      <Country></Country>
      <Postedate>2026-03-09</Postedate>
    </job>
    <job>
      <externalid>80561904-5df</externalid>
      <Title>IT Systems Engineer</Title>
      <Description><![CDATA[<p><strong>About the role</strong></p>
<p>The IT Engineering team at Anthropic builds, secures, and maintains the systems and infrastructure that enable every team to do their best work. We&#39;re seeking an experienced engineer with deep expertise across IT domains—from endpoint management and identity systems to cloud infrastructure, networking, and SaaS security—who can help design, build, and maintain the tools and processes that enable and empower Anthropic employees.</p>
<p>In this role, you&#39;ll work closely with Security, IT Operations, and Engineering to implement zero-trust architectures and defense-in-depth strategies that both enable the business and meet Anthropic&#39;s ambitious security objectives. You&#39;ll combine hands-on technical implementation with strategic planning and design, lead technical initiatives across multiple workstreams, mentor other engineers, and spearhead projects that enable Anthropic to scale even faster without compromising on security.</p>
<p><strong>Responsibilities</strong></p>
<ul>
<li>Design and implement secure IT infrastructure that scales with rapid growth</li>
<li>Build automation frameworks that eliminate manual processes and enable self-service</li>
<li>Lead technical initiatives spanning multiple teams and systems</li>
<li>Implement robust security controls that maintain user productivity</li>
<li>Drive architectural decisions through technical analysis and stakeholder input</li>
<li>Establish engineering best practices and drive adoption across the team</li>
<li>Create sustainable solutions for vulnerability management and compliance</li>
<li>Mentor team members and continuously expand your own expertise</li>
<li>Champion Infrastructure as Code and modern DevOps practices</li>
<li>Constantly seek new ways to amplify and automate IT with Claude</li>
</ul>
<p><strong>You may be a good fit if you</strong></p>
<ul>
<li>Have 8+ years building secure IT systems in complex environments</li>
<li>Excel at solving ambiguous problems with multiple stakeholders</li>
<li>Make evidence-based technical decisions while considering diverse perspectives</li>
<li>Communicate technical concepts clearly to any audience</li>
<li>View IT Engineering as requiring product engineering rigor</li>
<li>Successfully deliver complex projects from conception to production</li>
<li>Balance strong technical opinions with pragmatic solutions</li>
<li>Thrive when given ownership of challenging technical problems</li>
<li>Write clear documentation as a natural part of your workflow</li>
<li>Actively seek and incorporate feedback</li>
</ul>
<p><strong>Strong candidates may also</strong></p>
<ul>
<li>Have led initiatives requiring coordination across multiple teams</li>
<li>Transformed traditional IT operations into engineering-driven organizations</li>
<li>Built strong partnerships with Security and Engineering teams</li>
<li>Excel at translating between technical and business requirements</li>
<li>Practice modern development methods (code reviews, testing, CI/CD)</li>
<li>Have experience building consensus around technical standards</li>
<li>Demonstrate both mentoring others and learning from peers</li>
<li>Work effectively in distributed teams</li>
</ul>
<p><strong>Technical Skills</strong></p>
<ul>
<li>Python programming and/or advanced scripting experience</li>
<li>Terraform and Infrastructure as Code</li>
<li>AWS or GCP cloud platforms</li>
<li>Identity protocols (SAML, OAuth, SCIM)</li>
<li>Heterogeneous environment management (MacOS, ChromeOS, Linux)</li>
<li>Enterprise SaaS administration and security</li>
<li>Identity and access management</li>
<li>CI/CD pipeline design</li>
<li>Configuration management tools</li>
<li>Network security</li>
<li>Modern MDM platforms</li>
<li>Security frameworks and compliance</li>
</ul>
<p><strong>Deadline to apply</strong></p>
<p>None. Applications will be reviewed on a rolling basis.</p>
<p><strong>Logistics</strong></p>
<ul>
<li>Education requirements: We require at least a Bachelor&#39;s degree in a related field or equivalent experience.</li>
<li>Location-based hybrid policy: Currently, we expect all staff to be in one of our offices at least 25% of the time. However, some roles may require more time in our offices.</li>
<li>Visa sponsorship: We do sponsor visas! However, we aren&#39;t able to successfully sponsor visas for every role and every candidate. But if we make you an offer, we will make every reasonable effort to get you a visa, and we retain an immigration lawyer to help with this.</li>
</ul>
<p><strong>We encourage you to apply even if you do not believe you meet every single qualification.</strong></p>
<p>Not all strong candidates will meet every single qualification as listed. Research shows that people who identify as being from underrepresented groups are more prone to experiencing imposter syndrome and doubting the strength of their candidacy, so we urge you not to exclude yourself prematurely and to submit an application if you&#39;re interested in this work.</p>
<p style="margin-top:24px;font-size:13px;color:#666;">XML job scraping automation by <a href="https://yubhub.co">YubHub</a></p>]]></Description>
      <Jobtype>full-time</Jobtype>
      <Experiencelevel>senior</Experiencelevel>
      <Workarrangement>hybrid</Workarrangement>
      <Salaryrange>$225,000 - $275,000 USD</Salaryrange>
      <Skills>Python programming, Terraform, AWS cloud platforms, Identity protocols, Heterogeneous environment management, Enterprise SaaS administration, Identity and access management, CI/CD pipeline design, Configuration management tools, Network security, Modern MDM platforms, Security frameworks and compliance</Skills>
      <Category>Engineering</Category>
      <Industry>Technology</Industry>
      <Employername>Anthropic</Employername>
      <Employerlogo>https://logos.yubhub.co/anthropic.com.png</Employerlogo>
      <Employerdescription>Anthropic is a leading AI company that aims to create reliable, interpretable, and steerable AI systems. The company has a quickly growing team of researchers, engineers, policy experts, and business leaders working together to build beneficial AI systems.</Employerdescription>
      <Employerwebsite>https://job-boards.greenhouse.io</Employerwebsite>
      <Compensationcurrency></Compensationcurrency>
      <Compensationmin></Compensationmin>
      <Compensationmax></Compensationmax>
      <Applyto>https://job-boards.greenhouse.io/anthropic/jobs/4887952008</Applyto>
      <Location>San Francisco, CA, Seattle, WA, New York City, NY</Location>
      <Country></Country>
      <Postedate>2026-03-08</Postedate>
    </job>
  </jobs>
</source>