{"version":"0.1","company":{"name":"YubHub","url":"https://yubhub.co","jobsUrl":"https://yubhub.co/jobs/skill/sagemaker"},"x-facet":{"type":"skill","slug":"sagemaker","display":"Sagemaker","count":12},"x-feed-size-limit":100,"x-feed-sort":"enriched_at desc","x-feed-notice":"This feed contains at most 100 jobs (the most recently enriched). For the full corpus, use the paginated /stats/by-facet endpoint or /search.","x-generator":"yubhub-xml-generator","x-rights":"Free to redistribute with attribution: \"Data by YubHub (https://yubhub.co)\"","x-schema":"Each entry in `jobs` follows https://schema.org/JobPosting. YubHub-native raw fields carry `x-` prefix.","jobs":[{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_1bd2d1b2-84f"},"title":"Senior Machine Learning Researcher","description":"<p>We are seeking a senior machine learning researcher to join our Core AI team.</p>\n<p>As part of the team, you will help solve complex business problems by developing viable cutting-edge AI/ML solutions.</p>\n<p>You will develop and implement creative solutions that fundamentally transform business processes, delivering breakthrough improvements rather than incremental changes.</p>\n<p>You will work closely with other AI/ML researchers and engineers, SWEs, product owners/managers, and business stakeholders, and participate in the full lifecycle of solution development, including requirements gathering with business, experimentation and algorithmic exploration, development, and assistance with productization.</p>\n<p>Key Responsibilities:</p>\n<ul>\n<li>Work independently or as part of a team to help design and implement high accuracy and delightful user experience solutions utilizing ML, NLP, GenAI, Agentic technologies.</li>\n</ul>\n<ul>\n<li>Participate in all aspects of solution development, including ideation and requirement gathering with business stakeholders, experimentation and exploration to identify 
strong solution approaches, solution development, etc.</li>\n</ul>\n<ul>\n<li>Prototype, test, and iterate on novel AI models and approaches to solve complex business challenges.</li>\n</ul>\n<ul>\n<li>Collaborate with cross-functional teams to identify opportunities where AI can create significant business value, and transition solutions into production systems.</li>\n</ul>\n<ul>\n<li>Research and stay updated with the latest advancements in machine learning and AI technologies.</li>\n</ul>\n<ul>\n<li>Participate in code reviews, technical discussions, and knowledge sharing sessions.</li>\n</ul>\n<ul>\n<li>Communicate technical concepts and transformative ideas effectively to both technical and non-technical stakeholders.</li>\n</ul>\n<p>Required Skills &amp; Qualifications:</p>\n<ul>\n<li>Bachelor&#39;s with 10+ years, Master&#39;s with 7+ years, or PhD with 5+ years in Computer Science, Data Science, Machine Learning, or related field.</li>\n</ul>\n<ul>\n<li>Deep expertise and proven ability in developing high accuracy/value solutions to business problems in the NLP, Generative AI, Agentic AI, and/or ML space.</li>\n</ul>\n<ul>\n<li>Hands-on experience with data processing, experimentation, and exploration.</li>\n</ul>\n<ul>\n<li>Strong programming skills in Python.</li>\n</ul>\n<ul>\n<li>Experience with cloud platforms (AWS, Azure, GCP) for deploying ML solutions.</li>\n</ul>\n<ul>\n<li>Excellent problem-solving skills and attention to detail.</li>\n</ul>\n<ul>\n<li>Strong communication skills to collaborate with technical and non-technical stakeholders.</li>\n</ul>\n<ul>\n<li>Ability to work independently and collaboratively.</li>\n</ul>\n<p>Additional Preferred Skills &amp; Qualifications:</p>\n<ul>\n<li>Understanding of the financial markets, including experience with financial datasets, is strongly preferred.</li>\n</ul>\n<ul>\n<li>Experience with ML frameworks such as PyTorch, TensorFlow.</li>\n</ul>\n<ul>\n<li>Familiarity with MLOps practices and tools 
such as SageMaker, MLflow, or Airflow.</li>\n</ul>\n<ul>\n<li>Previous experience working in an Agile environment.</li>\n</ul>\n<p>Millennium pays a total compensation package which includes a base salary, discretionary performance bonus, and a comprehensive benefits package. The estimated base salary range for this position is $175,000 to $250,000, which is specific to New York and may change in the future.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_1bd2d1b2-84f","directApply":true,"hiringOrganization":{"@type":"Organization","name":"IT - Artificial Intelligence","sameAs":"https://mlp.eightfold.ai","logo":"https://logos.yubhub.co/mlp.eightfold.ai.png"},"x-apply-url":"https://mlp.eightfold.ai/careers/job/755954012324","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$175,000 to $250,000","x-skills-required":["Python","Machine Learning","NLP","GenAI","Agentic technologies","Data processing","Experimentation","Exploration","Cloud platforms (AWS, Azure, GCP)","Problem-solving skills","Communication skills"],"x-skills-preferred":["PyTorch","TensorFlow","MLOps practices and tools (SageMaker, MLflow, Airflow)","Agile environment"],"datePosted":"2026-04-18T22:14:27.951Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"New York, New York, United States of America"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Python, Machine Learning, NLP, GenAI, Agentic technologies, Data processing, Experimentation, Exploration, Cloud platforms (AWS, Azure, GCP), Problem-solving skills, Communication skills, PyTorch, TensorFlow, MLOps practices and tools (SageMaker, MLflow, Airflow), Agile 
environment","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":175000,"maxValue":250000,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_5c70414d-4e6"},"title":"Full-Stack data engineer","description":"<p>We are seeking a highly self-sufficient, motivated engineer with strong full-stack data engineering skills to join our team. This is a remote/offshore role that requires autonomy, excellent communication, and the ability to deliver high-quality work with limited supervision while collaborating with a predominantly US-based team.</p>\n<p>You will build reliable, scalable data products and user experiences that power AI/ML modeling, agentic workflows, and reporting, working end-to-end from data ingestion and transformation through to UI. Our Python-based data platform is undergoing a major evolution toward a modern, cloud-native ELT architecture. We are standardizing on Snowflake as our central data platform and dbt as our core transformation framework, implementing scalable, maintainable ELT practices that simplify ingestion, modeling, and deployment.</p>\n<p>This role will be pivotal in independently designing and building robust data pipelines and semantic layers that directly power our AI and machine learning initiatives, delivering clean, reliable, and well-modeled data assets to our data science team for feature engineering, model training, and production inference. 
You will collaborate closely (primarily via remote channels) with data scientists and ML engineers to ensure our data ecosystem is optimized for experimentation speed, model performance, and seamless integration into downstream products and services.</p>\n<p>Key Responsibilities</p>\n<ul>\n<li>Remote collaboration &amp; communication: Operate effectively as an offshore member of a distributed team, proactively communicating status, risks, and blockers across time zones and coordinating overlap with US working hours as needed.</li>\n</ul>\n<ul>\n<li>Full-stack data engineering: Build across the entire stack, including data ingestion/acquisition and transformation, APIs, front-end components, and automated test suites, delivering production-grade solutions with minimal hand-holding.</li>\n</ul>\n<ul>\n<li>Autonomous delivery &amp; ownership: Take end-to-end ownership of features and projects, clarifying requirements, breaking work into milestones, estimating timelines, and delivering high-quality, well-documented solutions.</li>\n</ul>\n<ul>\n<li>Specification and design: Translate short- and long-term business requirements, architectural considerations, and competing timelines into clear, actionable technical specifications and design documents.</li>\n</ul>\n<ul>\n<li>Code quality: Write clean, maintainable, efficient code that adheres to evolving standards and quality processes, including unit tests and isolated integration tests in containerized environments.</li>\n</ul>\n<ul>\n<li>Continuous improvement: Contribute to agile practices and provide input on technical strategy, architectural decisions, and process improvements, continuously suggesting better tools, patterns, and automation.</li>\n</ul>\n<p>Required Skills &amp; Experience</p>\n<ul>\n<li>Professional experience: 5+ years in software engineering, with a full-stack background building complex, scalable data-engineering pipelines using data warehouse technology, SQL with dbt, Python, AWS with Terraform, 
and modern UI technologies.</li>\n</ul>\n<ul>\n<li>Modern data engineering: Strong experience with medallion data architecture patterns using data warehouse technologies (e.g., Snowflake), data transformation tooling (e.g., dbt), BI tooling, and NoSQL data marts (e.g., Elasticsearch/OpenSearch).</li>\n</ul>\n<ul>\n<li>Testing and QA: Solid understanding of unit testing, CI/CD automation, and quality assurance processes for both data pipeline testing and operational data quality tests.</li>\n</ul>\n<ul>\n<li>Remote work &amp; autonomy: Proven track record working in a remote or distributed environment, demonstrating self-motivation, reliable execution, and the ability to make sound technical decisions independently.</li>\n</ul>\n<ul>\n<li>Agile methodology: Working knowledge of Agile development practices and workflows (e.g., sprint planning, stand-ups, retrospectives) in a distributed team setting.</li>\n</ul>\n<ul>\n<li>Education: Bachelor’s or Master’s degree in Computer Science, Statistics, Informatics, Information Systems, or a related quantitative field.</li>\n</ul>\n<p>Preferred Skills &amp; Experience</p>\n<ul>\n<li>Machine learning and AI: Hands-on experience with large language models (LLMs) and agentic frameworks/workflows.</li>\n</ul>\n<ul>\n<li>Search and analytics: Familiarity with the ELK stack (Elasticsearch, Logstash, Kibana) for search and analytics solutions.</li>\n</ul>\n<ul>\n<li>Cloud expertise: Experience with AWS cloud services; familiarity with SageMaker; and CI/CD tooling such as GitHub Actions or Jenkins.</li>\n</ul>\n<ul>\n<li>Front-end expertise: Experience building user interfaces with Angular or a modern UI stack.</li>\n</ul>\n<ul>\n<li>Financial domain knowledge: Broad understanding of equities, fixed income, derivatives, futures, FX, and other financial instruments.</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_5c70414d-4e6","directApply":true,"hiringOrganization":{"@type":"Organization","name":"FIC & Risk Technology","sameAs":"https://mlp.eightfold.ai","logo":"https://logos.yubhub.co/mlp.eightfold.ai.png"},"x-apply-url":"https://mlp.eightfold.ai/careers/job/755955321460","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["Python","Snowflake","dbt","AWS","Terraform","modern UI technologies","data warehouse technology","SQL","unit testing","CI/CD automation","quality assurance processes"],"x-skills-preferred":["machine learning","AI","large language models","agentic frameworks","ELK stack","search and analytics solutions","cloud expertise","AWS cloud services","SageMaker","CI/CD tooling","front-end expertise","Angular","financial domain knowledge"],"datePosted":"2026-04-18T22:13:54.584Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Bangalore, Karnataka, India"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Python, Snowflake, dbt, AWS, Terraform, modern UI technologies, data warehouse technology, SQL, unit testing, CI/CD automation, quality assurance processes, machine learning, AI, large language models, agentic frameworks, ELK stack, search and analytics solutions, cloud expertise, AWS cloud services, SageMaker, CI/CD tooling, front-end expertise, Angular, financial domain knowledge"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_25fd58ed-3c0"},"title":"(Senior) Data Scientist (all genders)","description":"<p>You will be part of the Business Intelligence department, which consists of the Data Science, Data Analytics, and Data Engineering teams.</p>\n<p>As a Senior Data Scientist, you will work on various topics such as 
rankings, recommendations, user segmentation, user lifetime value, business forecasts, etc. You will have access to our huge dataset and work in collaboration with stakeholders from various departments.</p>\n<p>Your objective is to build the best internal and external products for our customers. Holidu highly values a diverse and open environment with people from all over the world.</p>\n<p>This role is based in Munich with a hybrid setup.</p>\n<p><strong>Our Tech Stack</strong></p>\n<ul>\n<li>Flexible data science environment (Python, Sagemaker)</li>\n<li>Database: AWS Stack (Redshift, Athena, Glue, S3).</li>\n<li>Data Pipelines: Airflow, DBT.</li>\n<li>Data Visualization: Looker.</li>\n<li>Data Analytics: SQL, Python.</li>\n<li>Collaboration: Git.</li>\n</ul>\n<p><strong>Your role in this journey</strong></p>\n<p>You will play a pivotal role in the Business Intelligence team alongside data scientists, analysts, and engineers. Together, you will lead the development and enhancement of our company-wide machine learning strategy.</p>\n<ul>\n<li>Collaborate across various business departments to identify opportunities and solve critical business challenges using data science solutions.</li>\n<li>Build and optimize predictive models such as booking cancellation forecasts, churn predictions, pricing optimization, revenue forecasting and marketing channel allocation.</li>\n<li>Take models from conception to production, continuously monitor their performance, and iterate to enhance accuracy and efficiency.</li>\n<li>Interface with diverse business stakeholders, ensuring alignment between data science initiatives and company goals.</li>\n<li>Demonstrate leadership in data science projects, leveraging your expertise to drive measurable business impact.</li>\n</ul>\n<p><strong>Your backpack is filled with</strong></p>\n<ul>\n<li>3+ years of experience as a Data Scientist, with a proven track record of applying data science methodologies to solve complex business 
problems.</li>\n<li>A degree in Machine Learning, Computer Science, Mathematics, Physics, or a related field.</li>\n<li>Expertise in statistics, predictive analytics, machine learning techniques, and proficiency in tools like Python and SQL.</li>\n<li>Experience with Airflow and dbt is a plus.</li>\n<li>Strong understanding of business operations and experience collaborating with diverse stakeholders.</li>\n<li>Enthusiasm for data science and a drive to deliver world-class products that make a difference.</li>\n</ul>\n<p><strong>Our adventure includes</strong></p>\n<ul>\n<li>Impact: Shape the future of travel with products used by millions of guests and thousands of hosts.</li>\n<li>Learning: Grow professionally in a culture that thrives on curiosity and feedback.</li>\n<li>Great People: Join a team of smart, motivated and international colleagues who challenge and support each other.</li>\n<li>Technology: Work in a modern tech environment.</li>\n<li>Flexibility: Work a hybrid setup with 50% in-office time for collaboration, and spend up to 8 weeks a year from other inspiring locations.</li>\n<li>Perks on Top: Of course, we also offer travel benefits, gym discounts, and other perks to keep you energized.</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_25fd58ed-3c0","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Holidu Hosts GmbH","sameAs":"https://holidu.jobs.personio.com","logo":"https://logos.yubhub.co/holidu.jobs.personio.com.png"},"x-apply-url":"https://holidu.jobs.personio.com/job/2555141","x-work-arrangement":"hybrid","x-experience-level":"senior","x-job-type":"Full-time","x-salary-range":null,"x-skills-required":["Python","Sagemaker","AWS 
Stack","Airflow","DBT","Looker","SQL","Git"],"x-skills-preferred":[],"datePosted":"2026-04-18T22:13:07.588Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Munich, Germany"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Python, Sagemaker, AWS Stack, Airflow, DBT, Looker, SQL, Git"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_6d7fadcc-6fa"},"title":"Data Scientist Computer Vision","description":"<p>At Bayer, we&#39;re seeking a talented Data Scientist with deep learning and machine learning expertise focused on image-based data to help shape the future of agriculture. In this role, you&#39;ll join a dynamic team that supports the development of Bayer Crop Science next-generation products by applying computer vision to automate critical processes across the Plant Biotechnology organisation.</p>\n<p>The primary responsibilities of this role are to:</p>\n<p>Solve real agricultural problems using deep learning and AI across image and other data modalities, translating complex models into tangible business and scientific impact.</p>\n<p>Design and implement end-to-end machine learning pipelines for computer vision use cases, including segmentation, classification, detection, and multi-task learning.</p>\n<p>Prototype, evaluate, and iterate on cutting-edge architectures such as CNNs, Vision Transformers, foundational and large-scale vision models, ensuring state-of-the-art performance.</p>\n<p>Optimize models for accuracy, robustness, and inference efficiency, including experimentation with hyperparameters, compression, and deployment-oriented optimisations.</p>\n<p>Independently build scalable data pipelines for training, validation, and evaluation, including data ingestion, augmentation strategies, and active learning loops.</p>\n<p>Collaborate cross-functionally with product, data, and software 
engineering teams to integrate models into production systems and deliver reliable, maintainable solutions.</p>\n<p>Contribute to MLOps practices, including model versioning, deployment, monitoring, and retraining workflows using modern tooling and cloud-based platforms.</p>\n<p>Build strong cross-functional relationships and actively engage with the broader Data Science Community to share best practices, align on standards, and co-create innovative solutions.</p>\n<p>Present clear, compelling, and validated stories about experiments, results, and recommendations to peers, senior management, and internal customers to drive strategic and operational decisions.</p>\n<p>We seek an incumbent who possesses the following:</p>\n<p>M.S. with 2+ years of experience or Ph.D. in Computer Science, Electrical Engineering, or a related field with a focus on machine learning or computer vision.</p>\n<p>Proficiency in Python and experience with deep learning frameworks such as PyTorch or TensorFlow.</p>\n<p>Hands-on experience with modern computer vision architectures including models such as ResNet, UNet, DeepLab, YOLO, SegFormer, SAM, and Vision Transformers.</p>\n<p>Strong background in handling large-scale datasets and creating custom datasets, for example using frameworks such as Hugging Face Datasets.</p>\n<p>Solid understanding of core machine learning concepts including loss functions, regularization, optimisation, and learning rate scheduling.</p>\n<p>Experience developing and deploying models using cloud-based ML platforms such as AWS SageMaker.</p>\n<p>Familiarity with Unix environments, including bash, file systems, and core utilities.</p>\n<p>Strong engineering practices including use of Git, Docker, CI/CD pipelines, modular codebase design, and unit testing.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_6d7fadcc-6fa","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Bayer","sameAs":"https://talent.bayer.com","logo":"https://logos.yubhub.co/talent.bayer.com.png"},"x-apply-url":"https://talent.bayer.com/careers/job/562949976908666","x-work-arrangement":"onsite","x-experience-level":"mid","x-job-type":"full-time","x-salary-range":"$109,370.40 - $164,055.60","x-skills-required":["Python","PyTorch","TensorFlow","ResNet","UNet","DeepLab","YOLO","SegFormer","SAM","Vision Transformers","Hugging Face Datasets","AWS SageMaker","Git","Docker","CI/CD pipelines","modular codebase design","unit testing"],"x-skills-preferred":[],"datePosted":"2026-04-18T22:11:10.602Z","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Manufacturing","skills":"Python, PyTorch, TensorFlow, ResNet, UNet, DeepLab, YOLO, SegFormer, SAM, Vision Transformers, Hugging Face Datasets, AWS SageMaker, Git, Docker, CI/CD pipelines, modular codebase design, unit testing","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":109370.4,"maxValue":164055.6,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_8b447835-74a"},"title":"Senior DataOps Engineer - Revenue Management (all genders)","description":"<p><strong>Your future team</strong></p>\n<p>You&#39;ll be part of our new Dynamic Pricing &amp; Revenue Management team, working alongside a Data Scientist and a Data Analyst. 
Together, you will work towards one core goal: helping hosts improve occupancy and earnings through a smart, dynamic, and data-driven pricing strategy.</p>\n<p><strong>Our Tech Stack</strong></p>\n<ul>\n<li>Data Storage &amp; Querying: S3, Redshift (with decentralized data sharing), Athena, and DuckDB.</li>\n<li>ML &amp; Model Serving: MLflow, SageMaker, and deployment APIs for model lifecycle management.</li>\n<li>Cloud &amp; DevOps: Terraform, Docker, Jenkins, and AWS EKS (Kubernetes) for scalable, resilient systems.</li>\n<li>Monitoring: ELK, Grafana, Looker, OpsGenie, and in-house tools for full visibility.</li>\n<li>Ingestion: Kafka-based event systems and tools like Airbyte and Fivetran for smooth third-party integrations.</li>\n<li>Automation &amp; AI: Extensive use of AI tools like Claude, Copilot, and Codex.</li>\n</ul>\n<p><strong>Your role in this journey</strong></p>\n<p>As a Data Ops Engineer – Revenue Management, you&#39;ll be the engineering backbone that enables our Data Scientists to move from experimentation to production. 
You bridge the gap between data science models and reliable, scalable production systems.</p>\n<p><strong>Responsibilities</strong></p>\n<ul>\n<li>Support model deployment and serving: help deploy pricing and demand models into production, building and maintaining APIs and serving infrastructure.</li>\n<li>Build and operate production pipelines: ensure data flows reliably from source to model to output, with proper monitoring and alerting.</li>\n<li>Collaborate cross-functionally: work closely with Data Scientists, Analysts, and Engineering teams to turn prototypes into production-ready solutions.</li>\n<li>Own infrastructure and tooling: set up and maintain the environments, CI/CD pipelines, and infrastructure that the team depends on.</li>\n<li>Ensure operational excellence by implementing monitoring, automated testing, and observability across the team&#39;s production systems.</li>\n<li>Migrate and productionize POC: turn experimental code into robust, maintainable Python applications.</li>\n<li>Ensure data quality, consistency, and documentation across revenue management metrics and datasets.</li>\n</ul>\n<p><strong>Benefits</strong></p>\n<ul>\n<li>Impact: Shape the future of travel with products used by millions of guests and thousands of hosts.</li>\n<li>Learning: Grow professionally in a culture that thrives on curiosity and feedback.</li>\n<li>Great People: Join a team of smart, motivated, and international colleagues who challenge and support each other.</li>\n<li>Technology: Work in a modern tech environment.</li>\n<li>Flexibility: Work a hybrid setup with 50% in-office time for collaboration, and spend up to 8 weeks a year from other inspiring locations.</li>\n<li>Perks on Top: Of course, we also offer travel benefits, gym discounts, and other perks to keep you energized.</li>\n</ul>\n<p><strong>Experience</strong></p>\n<ul>\n<li>4+ years of experience in Software Engineering, Data Engineering, DevOps, or MLOps.</li>\n<li>Strong hands-on skills in 
Python, you write clean, production-quality code.</li>\n<li>Experience with CI/CD, Docker, and infrastructure-as-code (e.g., Terraform).</li>\n<li>Familiarity with cloud platforms (AWS preferred) and deploying services in production.</li>\n<li>Exposure to or interest in ML model deployment (MLflow, SageMaker, or similar) is a strong plus.</li>\n<li>Desire to learn and use cutting-edge LLM tools and agents to improve your and the entire team&#39;s productivity.</li>\n<li>A proactive, hands-on mindset: you take ownership, spot problems, and drive solutions forward.</li>\n</ul>\n<p><strong>How to apply</strong></p>\n<p>If you&#39;re excited about this opportunity, please submit your application on our careers page!</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_8b447835-74a","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Holidu Hosts GmbH","sameAs":"https://holidu.jobs.personio.com","logo":"https://logos.yubhub.co/holidu.jobs.personio.com.png"},"x-apply-url":"https://holidu.jobs.personio.com/job/2597559","x-work-arrangement":"hybrid","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["Python","CI/CD","Docker","Terraform","Cloud platforms (AWS preferred)","ML model deployment (MLflow, SageMaker, or similar)"],"x-skills-preferred":["AI tools like Claude, Copilot, and Codex","Data Storage & Querying (S3, Redshift, Athena, DuckDB)","ML & Model Serving (MLflow, SageMaker, deployment APIs)","Cloud & DevOps (Terraform, Docker, Jenkins, AWS EKS)","Monitoring (ELK, Grafana, Looker, OpsGenie, in-house tools)","Ingestion (Kafka-based event systems, Airbyte, Fivetran)"],"datePosted":"2026-04-18T22:09:42.352Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Munich, 
Germany"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Python, CI/CD, Docker, Terraform, Cloud platforms (AWS preferred), ML model deployment (MLflow, SageMaker, or similar), AI tools like Claude, Copilot, and Codex, Data Storage & Querying (S3, Redshift, Athena, DuckDB), ML & Model Serving (MLflow, SageMaker, deployment APIs), Cloud & DevOps (Terraform, Docker, Jenkins, AWS EKS), Monitoring (ELK, Grafana, Looker, OpsGenie, in-house tools), Ingestion (Kafka-based event systems, Airbyte, Fivetran)"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_1338e7d1-ad8"},"title":"Cloud Machine Learning Engineer","description":"<p>At Hugging Face, we&#39;re on a journey to democratize good AI. We are building the fastest growing platform for AI builders. We are looking for a Cloud Machine Learning engineer responsible to help build machine learning solutions used by millions leveraging cloud technologies.</p>\n<p>You will work on integrating Hugging Face&#39;s open-source libraries like Transformers and Diffusers, with major cloud platforms or managed SaaS solutions. 
This role involves bridging and integrating models with different cloud providers, ensuring the models meet expected performance, designing and developing easy-to-use, secure, and robust developer experiences and APIs for our users, writing technical documentation, examples and notebooks to demonstrate new features, and sharing and advocating your work and the results with the community.</p>\n<p>The ideal candidate will have deep experience building with Hugging Face Technologies, including Transformers, Diffusers, Accelerate, PEFT, Datasets, expertise in Deep Learning Framework, preferably PyTorch, optionally XLA understanding, strong knowledge of cloud platforms like AWS and services like Amazon SageMaker, EC2, S3, CloudWatch and/or Azure and GCP equivalents, experience in building MLOps pipelines for containerizing models and solutions with Docker, familiarity with Typescript, Rust, and MongoDB, Kubernetes are helpful, ability to write clear documentation, examples and definition and work across the full product development lifecycle, and bonus experience with Svelte &amp; TailwindCSS.</p>\n<p>We are actively working to build a culture that values diversity, equity, and inclusivity. We are intentionally building a workplace where people feel respected and supported—regardless of who you are or where you come from. 
We believe this is foundational to building a great company and community.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_1338e7d1-ad8","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Hugging Face","sameAs":"https://huggingface.co/"},"x-apply-url":"https://apply.workable.com/j/A3879724CD","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["Deep experience building with Hugging Face Technologies, including Transformers, Diffusers, Accelerate, PEFT, Datasets","Expertise in Deep Learning Framework, preferably PyTorch, optionally XLA understanding","Strong knowledge of cloud platforms like AWS and services like Amazon SageMaker, EC2, S3, CloudWatch and/or Azure and GCP equivalents","Experience in building MLOps pipelines for containerizing models and solutions with Docker","Familiarity with Typescript, Rust, and MongoDB, Kubernetes are helpful"],"x-skills-preferred":["Bonus experience with Svelte & TailwindCSS"],"datePosted":"2026-03-10T11:32:29.200Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"United States"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Deep experience building with Hugging Face Technologies, including Transformers, Diffusers, Accelerate, PEFT, Datasets, Expertise in Deep Learning Framework, preferably PyTorch, optionally XLA understanding, Strong knowledge of cloud platforms like AWS and services like Amazon SageMaker, EC2, S3, CloudWatch and/or Azure and GCP equivalents, Experience in building MLOps pipelines for containerizing models and solutions with Docker, Familiarity with Typescript, Rust, and MongoDB, Kubernetes are helpful, Bonus experience with Svelte & 
TailwindCSS"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_af4253f8-57e"},"title":"Cloud Machine Learning Engineer - EMEA remote","description":"<p>At Hugging Face, we&#39;re on a journey to democratize good AI. We are building the fastest growing platform for AI builders with over 11 million users who collectively shared over 2M models, 700k datasets &amp; 600k apps. Our open-source libraries have more than 600k+ stars on Github. Hugging Face has become the most popular, community-driven project for training, sharing, and deploying the most advanced machine learning models.</p>\n<p>We are looking for a Cloud Machine Learning engineer responsible to help build machine learning solutions used by millions leveraging cloud technologies. You will work on integrating Hugging Face&#39;s open-source libraries like Transformers and Diffusers, with major cloud platforms or managed SaaS solutions.</p>\n<p>Responsibilities:</p>\n<ul>\n<li>Bridging and integrating 🤗 transformers/diffusers models with a different Cloud provider.</li>\n<li>Ensuring the above models meet the expected performance</li>\n<li>Designing &amp; Developing easy-to-use, secure, and robust Developer Experiences &amp; APIs for our users.</li>\n<li>Write technical documentation, examples and notebooks to demonstrate new features</li>\n<li>Sharing &amp; Advocating your work and the results with the community.</li>\n</ul>\n<p>About You\nYou&#39;ll enjoy working on this team if you have experience with and interest in deploying machine learning systems to production and build great developer experiences. 
The ideal candidate will have skills including:</p>\n<ul>\n<li>Deep experience building with Hugging Face Technologies, including Transformers, Diffusers, Accelerate, PEFT, Datasets</li>\n<li>Expertise in Deep Learning Framework, preferably PyTorch, optionally XLA understanding</li>\n<li>Strong knowledge of cloud platforms like AWS and services like Amazon SageMaker, EC2, S3, CloudWatch and/or Azure and GCP equivalents.</li>\n<li>Experience in building MLOps pipelines for containerizing models and solutions with Docker</li>\n<li>Familiarity with Typescript, Rust, and MongoDB, Kubernetes are helpful</li>\n<li>Ability to write clear documentation, examples and definition and work across the full product development lifecycle</li>\n<li>Bonus: Experience with Svelte &amp; TailwindCSS</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_af4253f8-57e","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Hugging Face","sameAs":"https://huggingface.co/"},"x-apply-url":"https://apply.workable.com/j/0CE9E806CC","x-work-arrangement":"remote","x-experience-level":"mid","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["Deep experience building with Hugging Face Technologies, including Transformers, Diffusers, Accelerate, PEFT, Datasets","Expertise in Deep Learning Framework, preferably PyTorch, optionally XLA understanding","Strong knowledge of cloud platforms like AWS and services like Amazon SageMaker, EC2, S3, CloudWatch and/or Azure and GCP equivalents.","Experience in building MLOps pipelines for containerizing models and solutions with Docker","Familiarity with Typescript, Rust, and MongoDB, Kubernetes are helpful"],"x-skills-preferred":["Svelte & 
TailwindCSS"],"datePosted":"2026-03-10T11:32:17.703Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Paris"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Deep experience building with Hugging Face Technologies, including Transformers, Diffusers, Accelerate, PEFT, Datasets, Expertise in Deep Learning Framework, preferably PyTorch, optionally XLA understanding, Strong knowledge of cloud platforms like AWS and services like Amazon SageMaker, EC2, S3, CloudWatch and/or Azure and GCP equivalents., Experience in building MLOps pipelines for containerizing models and solutions with Docker, Familiarity with Typescript, Rust, and MongoDB, Kubernetes are helpful, Svelte & TailwindCSS"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_6d7232ae-f49"},"title":"FBS Operations Analyst III","description":"<p><strong>Role Description</strong></p>\n<p>This role operates at the intersection of digital operations, customer experience, and AI-powered transformation. 
You will help drive the integration of intelligent automation, predictive analytics, and GenAI capabilities into digital service channels to improve customer experiences, enhance agent efficiency, and optimize operational performance.</p>\n<p><strong>Key Responsibilities</strong></p>\n<ul>\n<li>Identify and scope opportunities to utilize AI, machine learning, and automation to optimize digital experience operations and agent support processes.</li>\n<li>Collaborate with product owners to build AI-informed business cases, including ROI modeling for predictive experiences, intelligent routing, and automated troubleshooting.</li>\n<li>Manage AI-enabled dashboards and monitoring systems that surface real-time anomalies, experience dips, and emerging performance trends.</li>\n<li>Partner with CX, analytics, and engineering teams to run A/B tests leveraging AI-generated variants, measuring uplift and customer impact.</li>\n<li>Translate customer and agent insights using AI-powered analytics (e.g., text/sentiment analysis, pattern detection) into actionable recommendations for journey improvements.</li>\n<li>Apply GenAI capabilities to improve team operations, e.g., automated insights summaries, triaging, root cause analysis drafting, and workflow documentation.</li>\n</ul>\n<p><strong>Experience &amp; Education Requirements</strong></p>\n<ul>\n<li>Overall experience &gt; 6-8 years, 4+ years’ experience required with extensive relevant customer understanding/user research/consulting experience, packaging and delivering insights to influence change. 4+ years of combined experience required in analytics, performance reporting, and/or process improvement.</li>\n<li>Advanced project and process management experience. 
Agile preferred.</li>\n<li>Bachelor&#39;s degree preferred in Technology, Mathematics, Statistics, Business, or related field.</li>\n<li>Master’s degree preferred in Management, Analytics, Artificial Intelligence.</li>\n<li>Experience working with AI/ML-driven platforms or analytics tools (e.g., customer behavioral modeling, NLP pipelines, predictive analytics).</li>\n<li>Background or coursework in AI, machine learning, data science, or human-centered AI preferred.</li>\n<li>Demonstrated ability to translate technical AI concepts into business language for non-technical stakeholders.</li>\n<li>Availability to work in PST time zone.</li>\n</ul>\n<p><strong>Other Critical Skills</strong></p>\n<ul>\n<li>Familiarity with GenAI tools, conversational AI platforms, and AI-assisted productivity solutions.</li>\n<li>Ability to interpret outputs from models using techniques such as anomaly detection, clustering, and predictive scoring.</li>\n<li>Strong understanding of AI ethics, governance, and responsible AI frameworks.</li>\n<li>Comfortable working in environments where AI-driven decision automation informs prioritization and operational strategies.</li>\n<li>Possesses strong technical aptitude. Intermediate knowledge in using analytic tools such as Tableau and Microsoft Excel. Proficient in Microsoft Office including MS Word, Excel, Outlook, PowerPoint.</li>\n<li>Strong verbal communication and listening skills. Strong business acumen, with effective written and verbal communication skills.</li>\n<li>Ability to communicate, influence, and deliver with cross-functional teams and enterprise stakeholders. 
Ability to manage multiple projects with tight deadlines effectively with cross functional teams.</li>\n</ul>\n<p><strong>Software / Tool Skills</strong></p>\n<ul>\n<li>Experience with AI/ML or data intelligence platforms (e.g., Azure ML, AWS SageMaker, Google Vertex AI—basic exposure acceptable).</li>\n<li>Hands-on familiarity with GenAI copilots, prompt engineering concepts, or LLM-based experience analysis tools.</li>\n<li>Experience interpreting insights from NLP, AI-powered text analytics, or conversational AI systems.</li>\n<li>Hands on experience with user Feedback/Data analysis tools – Qualtrics, Fullstory, Mixpanel etc.</li>\n<li>Good proficiency or practical experience of A/B Testing tools.</li>\n<li>Agile tools such as Jira, Rally etc.</li>\n<li>Understanding of Contact center and CMS tools.</li>\n<li>Excel - Intermediate/Advanced. Proficiency in Microsoft 365</li>\n<li>Power BI (Highly Preferred)</li>\n</ul>\n<p><strong>Requirements</strong></p>\n<ul>\n<li>CX Operations</li>\n<li>Audit &amp; Governance</li>\n<li>Data Analysis</li>\n<li>Insights &amp; Predictive Analytics</li>\n<li>Root Cause Analysis</li>\n<li>Digital recommendations</li>\n<li>Competitive Analysis</li>\n<li>Personalization</li>\n<li>Agile framework</li>\n</ul>\n<p><strong>Benefits</strong></p>\n<ul>\n<li>Competitive compensation and benefits package:</li>\n</ul>\n<ol>\n<li>Competitive salary and performance-based bonuses</li>\n<li>Comprehensive benefits package</li>\n<li>Career development and training opportunities</li>\n<li>Flexible work arrangements (remote and/or office-based)</li>\n<li>Dynamic and inclusive work culture within a globally renowned group</li>\n<li>Private Health Insurance</li>\n<li>Pension Plan</li>\n<li>Paid Time Off</li>\n<li>Training &amp; Development</li>\n</ol>\n<p>Note: Benefits differ based on employee level.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_6d7232ae-f49","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Capgemini","sameAs":"https://jobs.workable.com","logo":"https://logos.yubhub.co/view.com.png"},"x-apply-url":"https://jobs.workable.com/view/de1cZSoSoi8aHVV3fzA2Kq/hybrid-fbs-operations-analyst-iii-in-hyderabad-at-capgemini","x-work-arrangement":"hybrid","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["AI/ML-driven platforms","Analytics tools","GenAI tools","Conversational AI platforms","AI-assisted productivity solutions","Tableau","Microsoft Excel","Microsoft Office","Qualtrics","Fullstory","Mixpanel","A/B Testing tools","Jira","Rally","Contact center and CMS tools","Power BI"],"x-skills-preferred":["Azure ML","AWS SageMaker","Google Vertex AI","LLM-based experience analysis tools","NLP","AI-powered text analytics","Conversational AI systems","Excel","Microsoft 365"],"datePosted":"2026-03-09T17:01:14.513Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Hyderabad, Telangana, India"}},"employmentType":"FULL_TIME","occupationalCategory":"IT","industry":"Technology","skills":"AI/ML-driven platforms, Analytics tools, GenAI tools, Conversational AI platforms, AI-assisted productivity solutions, Tableau, Microsoft Excel, Microsoft Office, Qualtrics, Fullstory, Mixpanel, A/B Testing tools, Jira, Rally, Contact center and CMS tools, Power BI, Azure ML, AWS SageMaker, Google Vertex AI, LLM-based experience analysis tools, NLP, AI-powered text analytics, Conversational AI systems, Excel, Microsoft 365"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_7af16166-8fd"},"title":"FBS Senior Data Domain Architect","description":"<p>FBS – Farmer Business Services is part of Farmers operations with the purpose of building a global approach to identifying, 
recruiting, hiring, and retaining top talent. We believe that the foundation of every successful business lies in having the right people with the right skills. That is where we come in—helping Farmers build a winning team that delivers consistent and sustainable results.</p>\n<p><strong>What to expect on your journey with us:</strong></p>\n<ul>\n<li>A solid and innovative company with a strong market presence</li>\n<li>A dynamic, diverse, and multicultural work environment</li>\n<li>Leaders with deep market knowledge and strategic vision</li>\n<li>Continuous learning and development</li>\n</ul>\n<p><strong>Objective:</strong> Designs and develops Data/Domain IT architecture (integrated process, applications, data and technology) solutions to business problems in alignment with the Enterprise Architecture direction and standards.</p>\n<p><strong>Key Responsibilities:</strong></p>\n<ul>\n<li>Utilizes in-depth conceptual and practical knowledge in Domain Architecture and basic knowledge of related job disciplines to perform complex technical planning, architecture development and modification of specifications for Domain solution delivery.</li>\n<li>Solves complex problems and partners effectively to execute broad, continuous Domain level architecture improvement roadmaps that impacts the organization.</li>\n<li>Works independently, receives minimal guidance and direction to solve for and influence Enterprise and System architecture through Domain level knowledge.</li>\n<li>Reviews high level design to ensure alignment to Solution Architecture.</li>\n<li>May lead projects or project steps within a broader project or may have accountability for on-going activities or objectives.</li>\n<li>Mentor developers and create reference implementations/frameworks.</li>\n<li>Partners with System Architects to elaborate capabilities and features.</li>\n<li>Delivers single domain architecture solutions and executes continuous domain level architecture improvement roadmap. 
Actively supports design and steering of a continuous delivery pipeline.</li>\n</ul>\n<p><strong>Requirements:</strong></p>\n<ul>\n<li>Over 6 years of experience as a senior domain architect for Data domains</li>\n<li>Advanced English Level</li>\n<li>Master&#39;s degree (PLUS)</li>\n<li>Insurance Experience (PLUS) Financial Services (PLUS)</li>\n</ul>\n<p><strong>Technical &amp; Business Skills:</strong></p>\n<ul>\n<li>ETL/ELT Tools (Informatica, DBT) - Advanced (7+ Years)</li>\n<li>Data Architecture / Data Modeling – Advanced (MUST)</li>\n<li>Data Warehouse – Advanced (MUST)</li>\n<li>Cloud Data Platforms - Advanced</li>\n<li>Data Integration Tools – Advanced</li>\n<li>Snowflake or Databricks - Intermediate (4-6 Years) MUST</li>\n<li>Any Cloud - Intermediate (4-6 Years)</li>\n<li>Power BI or Tableau - Intermediate (4-6 Years)</li>\n<li>Data Science tools (Sagemaker, Databricks) - Intermediate (4-6 Years)</li>\n<li>Data Lakehouse – Intermediate (MUST)</li>\n</ul>\n<ul>\n<li>Data Governance - Intermediate</li>\n<li>AI/ML - Entry Level (PLUS)</li>\n<li>Master Data Management - Intermediate</li>\n<li>Operational Data Management - Intermediate</li>\n</ul>\n<p><strong>Benefits:</strong></p>\n<p>This position comes with a competitive compensation and benefits package.</p>\n<ul>\n<li>A competitive salary and performance-based bonuses.</li>\n<li>Comprehensive benefits package.</li>\n<li>Flexible work arrangements (remote and/or office-based).</li>\n<li>You will also enjoy a dynamic and inclusive work culture within a globally renowned group.</li>\n<li>Private Health Insurance.</li>\n<li>Paid Time Off.</li>\n<li>Training &amp; Development opportunities in partnership with renowned companies.</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_7af16166-8fd","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Capgemini","sameAs":"https://jobs.workable.com","logo":"https://logos.yubhub.co/view.com.png"},"x-apply-url":"https://jobs.workable.com/view/jdUFHSPZZjHsgd3TR4R3BS/remote-fbs-senior-data-domain-architect-in-colombia-at-capgemini","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["ETL/ELT Tools (Informatica, DBT)","Data Architecture / Data Modeling","Data Warehouse","Cloud Data Platforms","Data Integration Tools","Snowflake or Databricks","Any Cloud","Power BI or Tableau","Data Science tools (Sagemaker, Databricks)","Data Lakehouse"],"x-skills-preferred":["Data Governance","AI/ML","Master Data Management","Operational Data Management"],"datePosted":"2026-03-09T17:00:36.230Z","jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"ETL/ELT Tools (Informatica, DBT), Data Architecture / Data Modeling, Data Warehouse, Cloud Data Platforms, Data Integration Tools, Snowflake or Databricks, Any Cloud, Power BI or Tableau, Data Science tools (Sagemaker, Databricks), Data Lakehouse, Data Governance, AI/ML, Master Data Management, Operational Data Management"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_3b299ab2-218"},"title":"FBS Senior Operations Analyst","description":"<p><strong>Job Description</strong></p>\n<p>As a FBS Senior Operations Analyst at Capgemini, you will play a key role in driving the integration of intelligent automation, predictive analytics, and GenAI capabilities into digital service channels to improve customer experiences, enhance agent efficiency, and optimize operational performance.</p>\n<p><strong>Role Description</strong></p>\n<p>This role operates at the 
intersection of digital operations, customer experience, and AI-powered transformation. You will help drive the integration of intelligent automation, predictive analytics, and GenAI capabilities into digital service channels to improve customer experiences, enhance agent efficiency, and optimize operational performance.</p>\n<p><strong>Key Responsibilities</strong></p>\n<ul>\n<li>Oversee end-to-end digital service operations across customer and agent journeys, partnering with Product, Technology, and Architecture teams to ensure smooth delivery of features, fixes, and enhancements.</li>\n<li>Define and monitor KPIs such as experience scores, CSAT, FCR, and system uptime to track digital performance across various digital channels.</li>\n<li>Drive real-time customer and agent experience tracking, identify friction points, and lead UX improvement initiatives backed by data analysis and user feedback.</li>\n<li>Manage critical defects and high-priority escalations by coordinating with stakeholder teams to ensure rapid and accurate resolution.</li>\n<li>Build and maintain real-time dashboards to surface trends, generate insights, and inform leadership decisions; leverage tools such as Qualtrics, Mixpanel, and Fullstory etc.</li>\n<li>Use customer and partner research to strengthen analytical capabilities, identify market trends, and recommend customer experience enhancements.</li>\n<li>Partner with data science, analytics, and engineering teams to embed AI-driven insights into digital operations, enabling proactive issue detection and resolution.</li>\n<li>Apply AI-based predictive models to forecast demand, detect patterns, and inform operational and resource planning.</li>\n<li>Evaluate and operationalize AI copilots, GenAI assistants, and automation tools to streamline agent workflows and improve customer self-service.</li>\n<li>Promote responsible AI through governance, transparency, bias monitoring, explainability, and data privacy practices.</li>\n<li>Lead 
GenAI-powered experiments that include personalized content, conversational experiences, and journey-level optimizations.</li>\n<li>Support AI-driven journey analytics using NLP, clustering, sentiment analysis, and behavioral modeling to improve customer understanding.</li>\n</ul>\n<p><strong>Experience &amp; Education Requirements</strong></p>\n<ul>\n<li>Overall experience &gt; 6-8 years, 4+ years’ experience required with extensive relevant customer understanding/user research/consulting experience, packaging and delivering insights to influence change. 4+ years of combined experience required in analytics, performance reporting, and/or process improvement.</li>\n<li>Advanced project and process management experience. Agile preferred.</li>\n<li>Bachelor&#39;s degree preferred in Technology, Mathematics, Statistics, Business, or related field.</li>\n<li>Master’s degree preferred in Management, Analytics, Artificial Intelligence.</li>\n<li>Experience working with AI/ML-driven platforms or analytics tools (e.g., customer behavioral modeling, NLP pipelines, predictive analytics).</li>\n<li>Background or coursework in AI, machine learning, data science, or human-centered AI preferred.</li>\n<li>Demonstrated ability to translate technical AI concepts into business language for non-technical stakeholders.</li>\n<li>Availability to work in PST time zone.</li>\n</ul>\n<p><strong>Other Critical Skills</strong></p>\n<ul>\n<li>Familiarity with GenAI tools, conversational AI platforms, and AI-assisted productivity solutions.</li>\n<li>Ability to interpret outputs from models using techniques such as anomaly detection, clustering, and predictive scoring.</li>\n<li>Strong understanding of AI ethics, governance, and responsible AI frameworks.</li>\n<li>Comfortable working in environments where AI-driven decision automation informs prioritization and operational strategies.</li>\n<li>Possesses strong technical aptitude. 
Intermediate knowledge in using analytic tools such as Tableau and Microsoft Excel. Proficient in Microsoft Office including MS Word, Excel, Outlook, PowerPoint.</li>\n<li>Strong verbal communication and listening skills. Strong business acumen, with effective written and verbal communication skills.</li>\n<li>Ability to communicate, influence, and deliver with cross-functional teams and enterprise stakeholders. Ability to manage multiple projects with tight deadlines effectively with cross functional teams.</li>\n</ul>\n<p><strong>Software / Tool Skills</strong></p>\n<ul>\n<li>Experience with AI/ML or data intelligence platforms (e.g., Azure ML, AWS SageMaker, Google Vertex AI—basic exposure acceptable).</li>\n<li>Hands-on familiarity with GenAI copilots, prompt engineering concepts, or LLM-based experience analysis tools.</li>\n<li>Experience interpreting insights from NLP, AI-powered text analytics, or conversational AI systems.</li>\n<li>Hands on experience with user Feedback/Data analysis tools – Qualtrics, Fullstory, Mixpanel etc.</li>\n<li>Good proficiency or practical experience of A/B Testing tools.</li>\n<li>Agile tools such as Jira, Rally etc.</li>\n<li>Understanding of Contact center and CMS tools.</li>\n<li>Excel - Intermediate/Advanced. 
Proficiency in Microsoft 365</li>\n<li>Power BI (Highly Preferred)</li>\n</ul>\n<p><strong>Requirements</strong></p>\n<ul>\n<li>Optimization Testing (A/B Testing)</li>\n<li>Multivariate testing</li>\n<li>Personalization</li>\n<li>Digital Workflow optimization</li>\n<li>Stakeholder collaboration</li>\n</ul>\n<p><strong>Benefits</strong></p>\n<ul>\n<li>Competitive compensation and benefits package:</li>\n</ul>\n<ol>\n<li>Competitive salary and performance-based bonuses</li>\n<li>Comprehensive benefits package</li>\n<li>Career development and training opportunities</li>\n<li>Flexible work arrangements (remote and/or office-based)</li>\n<li>Dynamic and inclusive work culture within a globally renowned group</li>\n<li>Private Health Insurance</li>\n<li>Pension Plan</li>\n<li>Paid Time Off</li>\n<li>Training &amp; Development</li>\n</ol>\n<p>Note: Benefits differ based on employee level.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_3b299ab2-218","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Capgemini","sameAs":"https://jobs.workable.com","logo":"https://logos.yubhub.co/view.com.png"},"x-apply-url":"https://jobs.workable.com/view/sATBKqBtk3mUXu5nXF9A86/hybrid-fbs-senior-operations-analyst-in-hyderabad-at-capgemini","x-work-arrangement":"hybrid","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["AI/ML-driven platforms","Analytics tools","GenAI tools","Conversational AI platforms","AI-assisted productivity solutions","Tableau","Microsoft Excel","Microsoft Office","Qualtrics","Mixpanel","Fullstory","A/B Testing tools","Jira","Rally","Contact center and CMS tools","Power BI"],"x-skills-preferred":["Azure ML","AWS SageMaker","Google Vertex AI","LLM-based experience analysis tools","NLP","AI-powered text analytics","Conversational AI systems","User Feedback/Data analysis 
tools","Agile tools"],"datePosted":"2026-03-09T16:59:57.617Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Hyderabad, Telangana, India"}},"employmentType":"FULL_TIME","occupationalCategory":"Operations","industry":"Technology","skills":"AI/ML-driven platforms, Analytics tools, GenAI tools, Conversational AI platforms, AI-assisted productivity solutions, Tableau, Microsoft Excel, Microsoft Office, Qualtrics, Mixpanel, Fullstory, A/B Testing tools, Jira, Rally, Contact center and CMS tools, Power BI, Azure ML, AWS SageMaker, Google Vertex AI, LLM-based experience analysis tools, NLP, AI-powered text analytics, Conversational AI systems, User Feedback/Data analysis tools, Agile tools"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_7b03b30a-b20"},"title":"FBS Senior Data Domain Architect","description":"<p>FBS – Farmer Business Services is part of Farmers operations with the purpose of building a global approach to identifying, recruiting, hiring, and retaining top talent. By combining international reach with US expertise, we build diverse and high-performing teams that are equipped to thrive in today’s competitive marketplace.</p>\n<p>We believe that the foundation of every successful business lies in having the right people with the right skills. That is where we come in—helping Farmers build a winning team that delivers consistent and sustainable results.</p>\n<p>Since we don’t have a local legal entity, we’ve partnered with Capgemini, which acts as the Employer of Record. 
Capgemini is responsible for managing local payroll and benefits.</p>\n<p><strong>Objective:</strong> Designs and develops Data/Domain IT architecture (integrated process, applications, data and technology) solutions to business problems in alignment with the Enterprise Architecture direction and standards.</p>\n<p><strong>Key Responsibilities:</strong></p>\n<ul>\n<li>Utilizes in-depth conceptual and practical knowledge in Domain Architecture and basic knowledge of related job disciplines to perform complex technical planning, architecture development and modification of specifications for Domain solution delivery.</li>\n</ul>\n<ul>\n<li>Solves complex problems and partners effectively to execute broad, continuous Domain level architecture improvement roadmaps that impacts the organization.</li>\n</ul>\n<ul>\n<li>Works independently, receives minimal guidance and direction to solve for and influence Enterprise and System architecture through Domain level knowledge.</li>\n</ul>\n<ul>\n<li>Reviews high level design to ensure alignment to Solution Architecture.</li>\n</ul>\n<ul>\n<li>May lead projects or project steps within a broader project or may have accountability for on-going activities or objectives.</li>\n</ul>\n<ul>\n<li>Mentor developers and create reference implementations/frameworks.</li>\n</ul>\n<ul>\n<li>Partners with System Architects to elaborate capabilities and features.</li>\n</ul>\n<ul>\n<li>Delivers single domain architecture solutions and executes continuous domain level architecture improvement roadmap. 
Actively supports design and steering of a continuous delivery pipeline.</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_7b03b30a-b20","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Capgemini","sameAs":"https://jobs.workable.com","logo":"https://logos.yubhub.co/view.com.png"},"x-apply-url":"https://jobs.workable.com/view/1U952YA2QBa8zK7Tm5d3Lm/remote-fbs-senior-data-domain-architect-in-mexico-at-capgemini","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["ETL/ELT Tools (Informatica, DBT)","Data Architecture / Data Modeling","Data Warehouse","Cloud Data Platforms","Data Integration Tools","Snowflake or Databricks","Any Cloud","Power BI or Tableau","Data Science tools (Sagemaker, Databricks)","Data Lakehouse","Data Governance","Master Data Management","Operational Data Management"],"x-skills-preferred":["AI/ML"],"datePosted":"2026-03-09T16:59:14.361Z","jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"ETL/ELT Tools (Informatica, DBT), Data Architecture / Data Modeling, Data Warehouse, Cloud Data Platforms, Data Integration Tools, Snowflake or Databricks, Any Cloud, Power BI or Tableau, Data Science tools (Sagemaker, Databricks), Data Lakehouse, Data Governance, Master Data Management, Operational Data Management, AI/ML"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_dcfed817-412"},"title":"FBS Senior Data Domain Architect","description":"<p>We&#39;re looking for a Senior Data Domain Architect to join our team. 
As a Senior Data Domain Architect, you will design and develop Data/Domain IT architecture solutions to business problems in alignment with the Enterprise Architecture direction and standards.</p>\n<p><strong>What to expect on your journey with us:</strong></p>\n<ul>\n<li>A solid and innovative company with a strong market presence</li>\n<li>A dynamic, diverse, and multicultural work environment</li>\n<li>Leaders with deep market knowledge and strategic vision</li>\n<li>Continuous learning and development</li>\n</ul>\n<p><strong>Key Responsibilities:</strong></p>\n<ul>\n<li>Utilize in-depth conceptual and practical knowledge in Domain Architecture and basic knowledge of related job disciplines to perform complex technical planning, architecture development and modification of specifications for Domain solution delivery</li>\n<li>Solve complex problems and partner effectively to execute broad, continuous Domain level architecture improvement roadmaps that impacts the organization</li>\n<li>Work independently, receives minimal guidance and direction to solve for and influence Enterprise and System architecture through Domain level knowledge</li>\n<li>Review high level design to ensure alignment to Solution Architecture</li>\n<li>May lead projects or project steps within a broader project or may have accountability for on-going activities or objectives</li>\n<li>Mentor developers and create reference implementations/frameworks</li>\n<li>Partner with System Architects to elaborate capabilities and features</li>\n<li>Deliver single domain architecture solutions and execute continuous domain level architecture improvement roadmap. 
Actively supports design and steering of a continuous delivery pipeline</li>\n</ul>\n<p><strong>Requirements:</strong></p>\n<ul>\n<li>Over 6 years of experience as a senior domain architect for Data domains</li>\n<li>Advanced English Level</li>\n<li>Master&#39;s degree (PLUS)</li>\n<li>Insurance Experience (PLUS) Financial Services (PLUS)</li>\n</ul>\n<p><strong>Technical &amp; Business Skills:</strong></p>\n<ul>\n<li>ETL/ELT Tools (Informatica, DBT) - Advanced (7+ Years)</li>\n<li>Data Architecture / Data Modeling – Advanced (MUST)</li>\n<li>Data Warehouse – Advanced (MUST)</li>\n<li>Cloud Data Platforms - Advanced</li>\n<li>Data Integration Tools – Advanced</li>\n<li>Snowflake or Databricks - Intermediate (4-6 Years) MUST</li>\n<li>Any Cloud - Intermediate (4-6 Years)</li>\n<li>Power BI or Tableau - Intermediate (4-6 Years)</li>\n<li>Data Science tools (Sagemaker, Databricks) - Intermediate (4-6 Years)</li>\n<li>Data Lakehouse – Intermediate (MUST)</li>\n</ul>\n<p><strong>Benefits:</strong></p>\n<ul>\n<li>A competitive salary and performance-based bonuses</li>\n<li>Comprehensive benefits package</li>\n<li>Flexible work arrangements (remote and/or office-based)</li>\n<li>Private Health Insurance</li>\n<li>Paid Time Off</li>\n<li>Training &amp; Development opportunities in partnership with renowned companies</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_dcfed817-412","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Capgemini","sameAs":"https://jobs.workable.com","logo":"https://logos.yubhub.co/view.com.png"},"x-apply-url":"https://jobs.workable.com/view/x7tKXYFBB815ca6oBV5T2E/remote-fbs-senior-data-domain-architect-in-brazil-at-capgemini","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["ETL/ELT Tools (Informatica, DBT)","Data 
Architecture / Data Modeling","Data Warehouse","Cloud Data Platforms","Data Integration Tools","Snowflake or Databricks","Any Cloud","Power BI or Tableau","Data Science tools (Sagemaker, Databricks)","Data Lakehouse"],"x-skills-preferred":[],"datePosted":"2026-03-09T16:53:31.425Z","jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"ETL/ELT Tools (Informatica, DBT), Data Architecture / Data Modeling, Data Warehouse, Cloud Data Platforms, Data Integration Tools, Snowflake or Databricks, Any Cloud, Power BI or Tableau, Data Science tools (Sagemaker, Databricks), Data Lakehouse"}]}