{"version":"0.1","company":{"name":"YubHub","url":"https://yubhub.co","jobsUrl":"https://yubhub.co/jobs/skill/cloud-technology"},"x-facet":{"type":"skill","slug":"cloud-technology","display":"Cloud Technology","count":37},"x-feed-size-limit":100,"x-feed-sort":"enriched_at desc","x-feed-notice":"This feed contains at most 100 jobs (the most recently enriched). For the full corpus, use the paginated /stats/by-facet endpoint or /search.","x-generator":"yubhub-xml-generator","x-rights":"Free to redistribute with attribution: \"Data by YubHub (https://yubhub.co)\"","x-schema":"Each entry in `jobs` follows https://schema.org/JobPosting. YubHub-native raw fields carry `x-` prefix.","jobs":[{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_038b4893-89b"},"title":"IT Audit Lead","description":"<p>We are seeking an IT Audit Lead to join our Management Controls and Internal Audit Group. As an IT Audit Lead, you will be responsible for leading IT audit engagements, planning and carrying out the audit, and continuously working to improve processes and procedures. 
You will work closely with the Head of Information Technology Audit to develop and maintain an in-depth understanding of the technology organization, business areas, and support functions.</p>\n<p>Primary Responsibilities:</p>\n<ul>\n<li>Lead and perform IT and integrated audit engagements, with support from IT Auditors, focusing on IT core infrastructure, trade execution and trade processing infrastructure, critical applications, and IT general controls;</li>\n<li>Build and maintain relationships with key stakeholders, establishing a culture of engagement while adding value;</li>\n<li>Develop and maintain an in-depth understanding of the technology organization, business areas, and support functions;</li>\n<li>Support the Head of Information Technology Audit with audit planning, scope design, internal control assessment, raising and reporting of issues, and monitoring of remediation plans;</li>\n<li>Participate in department-wide initiatives focused on continually improving firm processes and the control environment;</li>\n<li>Assist with annual risk assessment process, audit plan creation, and other departmental administrative projects.</li>\n</ul>\n<p>Qualifications/Skills Required:</p>\n<ul>\n<li>12+ years of IT audit experience with exposure to core IT infrastructure, cyber security, equities trading, fixed-income trading, operations, and/or trade support functions;</li>\n<li>Strong analytical and reporting skills and effective relationship-building experience;</li>\n<li>Effective communication (verbal and written) and inter-personal skills, with the ability to present sophisticated and sensitive issues to management and inspire change;</li>\n<li>Knowledge and experience of core IT infrastructure platforms (e.g., Windows, Unix, Sybase, SQL), cyber security, cloud technology, networks, firewalls, and/or data analytics;</li>\n<li>Extensive knowledge of the audit lifecycle and the evaluation of IT general controls and IT automated controls;</li>\n<li>Bachelor’s 
degree in Information Systems, Computer Science/Engineering, or other relevant fields;</li>\n<li>A related certification (e.g., CISA, CISSP, CIA) is desired;</li>\n<li>Domestic and international travel requirements: 0%-10%.</li>\n</ul>\n<p>The estimated base salary range for this position is $160,000 to $250,000, which is specific to New York and may change in the future.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_038b4893-89b","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Audit","sameAs":"https://mlp.eightfold.ai","logo":"https://logos.yubhub.co/mlp.eightfold.ai.png"},"x-apply-url":"https://mlp.eightfold.ai/careers/job/755953849622","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$160,000 to $250,000","x-skills-required":["IT audit experience","core IT infrastructure","cyber security","equities trading","fixed-income trading","operations","trade support functions","analytical and reporting skills","relationship-building experience","communication (verbal and written) and inter-personal skills","knowledge of core IT infrastructure platforms","cloud technology","networks","firewalls","data analytics","audit lifecycle","IT general controls","IT automated controls"],"x-skills-preferred":[],"datePosted":"2026-04-18T22:14:05.689Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"New York, New York, United States of America"}},"employmentType":"FULL_TIME","occupationalCategory":"IT","industry":"Finance","skills":"IT audit experience, core IT infrastructure, cyber security, equities trading, fixed-income trading, operations, trade support functions, analytical and reporting skills, relationship-building experience, communication (verbal and written) and inter-personal skills, knowledge of core IT infrastructure platforms, cloud 
technology, networks, firewalls, data analytics, audit lifecycle, IT general controls, IT automated controls","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":160000,"maxValue":250000,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_e8abf445-c26"},"title":"Staff Applied AI Engineer, Enterprise GenAI","description":"<p>We&#39;re looking for a Staff Applied AI Engineer to join our Enterprise Engineering team. As an Applied AI Engineer, you&#39;ll work with clients to create ML solutions to satisfy their business needs. Your work will range from building next-generation AI cybersecurity firewalls to creating transformative AI experiences in journalism to applying foundation genomic models making predictions about life-saving drug proteins.</p>\n<p>Daily data-driven experiments will provide key insights around model strengths and inefficiencies which you&#39;ll use to improve your product&#39;s performance. 
If you are excited about shaping the future of the modern AI movement, we would love to hear from you!</p>\n<p>You will:</p>\n<ul>\n<li>Own, plan, and optimize the AI behind our Enterprise customer&#39;s deepest technical problems</li>\n<li>Leverage SGP to build the most advanced AI agents across the industry including multimodal functionality, tool-calling, and more</li>\n<li>Have experience gathering business requirements and translating them into technical solutions</li>\n<li>Meet regularly with customer teams onsite and virtually, collaborating cross-functionally with all teams responsible for their data and ML needs</li>\n<li>Push production code in multiple development environments, writing and debugging code directly in both our customer&#39;s and Scale&#39;s codebases.</li>\n</ul>\n<p>Ideally you&#39;d have:</p>\n<ul>\n<li>7+ years of full-time engineering experience, post-graduation</li>\n<li>A love for solving deeply complex technical problems with ambiguity using state of the art research and AI to accomplish your client’s business goals</li>\n<li>Strong engineering background: a Bachelor’s degree in Computer Science, Mathematics, or another quantitative field or equivalent strong engineering background.</li>\n<li>Deep familiarity with a data-driven approach when iterating on machine learning models and how changes in datasets can influence model results</li>\n<li>Experience working with cloud technology stack (eg. 
AWS or GCP) and developing machine learning models in a cloud environment</li>\n<li>Proficiency in Python to write, test and debug code using common libraries (ie numpy, pandas)</li>\n</ul>\n<p>Nice to haves:</p>\n<ul>\n<li>Strong knowledge of software engineering best practices</li>\n<li>Have built applications taking advantage of Generative AI in real, production use cases</li>\n<li>Familiarity with state of the art LLMs and their strengths/weaknesses</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_e8abf445-c26","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Scale","sameAs":"https://scale.com/","logo":"https://logos.yubhub.co/scale.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/scaleai/jobs/4683689005","x-work-arrangement":"hybrid","x-experience-level":"staff","x-job-type":"full-time","x-salary-range":"$216,000-$270,000 USD","x-skills-required":["Python","Machine Learning","Cloud Technology Stack","Data-Driven Approach","Software Engineering Best Practices"],"x-skills-preferred":["Generative AI","State of the Art LLMs"],"datePosted":"2026-04-18T16:00:44.071Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"San Francisco, CA; Seattle, WA; New York, NY"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Python, Machine Learning, Cloud Technology Stack, Data-Driven Approach, Software Engineering Best Practices, Generative AI, State of the Art LLMs","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":216000,"maxValue":270000,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_5aa5b947-f4d"},"title":"Staff Machine Learning Research Scientist/ Engineer, 
Agents","description":"<p>About Scale AI</p>\n<p>At Scale AI, our mission is to accelerate the development of AI applications. This role is at the intersection of cutting-edge AI research and practical application, with a focus on studying the data types essential for building state-of-the-art agents.</p>\n<p>Responsibilities</p>\n<ul>\n<li>Explore the data landscape needed to advance intelligent, adaptable AI agents, guiding the data strategy at Scale to drive innovation.</li>\n<li>Contribute to impactful research publications on agents, collaborate with customer researchers, and work alongside the engineering team to translate these advancements into real-world, scalable solutions.</li>\n</ul>\n<p>Requirements</p>\n<ul>\n<li>Practical experience working with LLMs, with proficiency in frameworks like Pytorch, Jax, or Tensorflow.</li>\n<li>A track record of published research in top ML venues (e.g., ACL, EMNLP, NAACL, NeurIPS, ICML, ICLR, COLM, etc.).</li>\n<li>At least three years of experience addressing sophisticated ML problems, either in a research setting or product development.</li>\n</ul>\n<p>Nice to Have</p>\n<ul>\n<li>Hands-on experience with open source LLM fine-tuning or involvement in bespoke LLM fine-tuning projects using Pytorch/Jax.</li>\n<li>Hands-on experience and publications in building applications and evaluations related to AI agents such as tool-use, text2SQL, browser agents, coding agents and GUI agents.</li>\n<li>Hands-on experience with agent frameworks such as OpenHands, Swarm, LangGraph, etc.</li>\n<li>Familiarity with agentic reasoning methods such as STaR and PLANSEARCH</li>\n<li>Experience working with cloud technology stack (eg. 
AWS or GCP) and developing machine learning models in a cloud environment.</li>\n</ul>\n<p>Benefits</p>\n<ul>\n<li>Comprehensive health, dental and vision coverage</li>\n<li>Retirement benefits</li>\n<li>A learning and development stipend</li>\n<li>Generous PTO</li>\n<li>Commuter stipend</li>\n</ul>\n<p>Salary Range</p>\n<p>$259,200-$324,000 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_5aa5b947-f4d","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Scale AI","sameAs":"https://scale.com/","logo":"https://logos.yubhub.co/scale.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/scaleai/jobs/4488520005","x-work-arrangement":"hybrid","x-experience-level":"staff","x-job-type":"full-time","x-salary-range":"$259,200-$324,000 USD","x-skills-required":["Pytorch","Jax","Tensorflow","LLMs","Agent frameworks","Agentic reasoning methods","Cloud technology stack"],"x-skills-preferred":["Open source LLM fine-tuning","Bespoke LLM fine-tuning projects"],"datePosted":"2026-04-18T15:59:17.656Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"San Francisco, CA; Seattle, WA; New York, NY"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Pytorch, Jax, Tensorflow, LLMs, Agent frameworks, Agentic reasoning methods, Cloud technology stack, Open source LLM fine-tuning, Bespoke LLM fine-tuning projects","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":259200,"maxValue":324000,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_8b8cbfe7-a98"},"title":"Senior Software Engineer, Echo","description":"<p>Ready to be pushed beyond what you think you’re capable of?</p>\n<p>At Coinbase, our mission is to increase 
economic freedom in the world.</p>\n<p>We&#39;re seeking a very specific candidate who is passionate about our mission and who believes in the power of crypto and blockchain technology to update the financial system.</p>\n<p>As a Senior Software Engineer on the Echo team, you will solve unique, large scale, highly complex technical problems, bridging the constraints posed by web-scale applications and blockchain technology.</p>\n<p>You will help build the next generation of systems to make cryptocurrency accessible to everyone across the globe, operating real-time applications with high frequency, low latency updates, and managing the most secure, dockerized infrastructure running in the cloud.</p>\n<p>The Echo team is responsible for two innovative products in the capital formation space; Echo and Sonar.</p>\n<p>We are a small team that operate as a startup within the larger org, and we’re committed to shipping impactful product at a fast pace.</p>\n<p>Echo, our marketplace for private investments has facilitated over 300 deals and $150m invested since 2024, and Sonar - our public sales and compliance platform - enables customers to run their own token sales.</p>\n<p>Our engineering team works across the whole stack and is empowered to take ownership of large projects.</p>\n<p>What you&#39;ll be doing:</p>\n<ul>\n<li>Build new services to meet critical product and business needs using Golang.</li>\n</ul>\n<ul>\n<li>Design scalable systems to solve novel problems with modern cloud technology and industry best practices.</li>\n</ul>\n<ul>\n<li>Articulate a long term vision for maintaining and scaling our backend systems and the teams running them.</li>\n</ul>\n<ul>\n<li>Work with engineers, designers, product managers and senior leadership to turn our product and technical vision into a tangible roadmap every quarter.</li>\n</ul>\n<ul>\n<li>Write high quality, well tested code to meet the needs of your customers.</li>\n</ul>\n<p>What we look for in 
you:</p>\n<ul>\n<li>You have at least 5 years of experience in software engineering.</li>\n</ul>\n<ul>\n<li>You’ve designed, built, scaled and maintained production services, and know how to compose a service oriented architecture.</li>\n</ul>\n<ul>\n<li>You write high quality, well tested code to meet the needs of your customers.</li>\n</ul>\n<ul>\n<li>You’re passionate about building an open financial system that brings the world together.</li>\n</ul>\n<ul>\n<li>Demonstrates the ability to responsibly use generative AI tools and copilots (e.g., LibreChat, Gemini, Glean) in daily workflows, continuously learn as tools evolve, and apply human-in-the-loop practices to deliver business-ready outputs and drive measurable improvements in efficiency, cost, and quality.</li>\n</ul>\n<p>Nice to haves:</p>\n<ul>\n<li>You have gone through a rapid growth in your company (from startup to mid-size).</li>\n</ul>\n<ul>\n<li>Experience with growth experiments or A/B testing frameworks.</li>\n</ul>\n<ul>\n<li>You have experience with Blockchain technology (such as Bitcoin, Ethereum etc..)</li>\n</ul>\n<ul>\n<li>You have experience decomposing a large monolith into microservices.</li>\n</ul>\n<ul>\n<li>You’ve worked with Golang, Ruby, Docker, Rails, Postgres, MongoDB or DynamoDB.</li>\n</ul>\n<ul>\n<li>You’ve built financial, high reliability or security systems.</li>\n</ul>\n<p>Job #: (GB-CFBE05UK-Q126)</p>\n<p>#LI-Remote</p>\n<p>Pay Transparency Notice: The target annual base salary for this position can range as detailed below. 
Total compensation may also include equity and bonus eligibility and benefits (including medical, dental, and vision).</p>\n<p>Annual base salary range (excluding equity and bonus):</p>\n<p>£122,400-£136,000 GBP</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_8b8cbfe7-a98","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Coinbase","sameAs":"https://www.coinbase.com/","logo":"https://logos.yubhub.co/coinbase.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/coinbase/jobs/7569402","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"£122,400-£136,000 GBP","x-skills-required":["Golang","Cloud technology","Service-oriented architecture","Blockchain technology","Generative AI tools and copilots","Ruby","Docker","Rails","Postgres","MongoDB","DynamoDB"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:58:15.071Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Remote - UK"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Golang, Cloud technology, Service-oriented architecture, Blockchain technology, Generative AI tools and copilots, Ruby, Docker, Rails, Postgres, MongoDB, DynamoDB","baseSalary":{"@type":"MonetaryAmount","currency":"GBP","value":{"@type":"QuantitativeValue","minValue":122400,"maxValue":136000,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_fdc6f0f9-900"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the 
Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache 
Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipated utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_fdc6f0f9-900","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461168002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","distributed computing","CI/CD","MLOps","performant end-to-end data architectures","technical project delivery","documentation and white-boarding skills","client management"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:57:29.214Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Los Angeles, California"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, distributed computing, CI/CD, MLOps, performant end-to-end data architectures, technical project delivery, documentation and white-boarding skills, client management","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_45cde3e1-29d"},"title":"Applied AI Engineer, Enterprise GenAI","description":"<p>We&#39;re looking for an Applied AI Engineer to join our Enterprise Engineering team. As an Applied AI Engineer, you&#39;ll work with clients to create ML solutions to satisfy their business needs. 
Your work will range from building next-generation AI cybersecurity firewalls to creating transformative AI experiences in journalism to applying foundation genomic models making predictions about life-saving drug proteins.</p>\n<p>Daily data-driven experiments will provide key insights around model strengths and inefficiencies which you&#39;ll use to improve your product&#39;s performance. You&#39;ll own, plan, and optimize the AI behind our Enterprise customer&#39;s deepest technical problems, leveraging our Scale Generative Platform (SGP) to build the most advanced AI agents across the industry.</p>\n<p>Responsibilities:</p>\n<ul>\n<li>Own, plan, and optimize the AI behind our Enterprise customer&#39;s deepest technical problems</li>\n<li>Leverage SGP to build the most advanced AI agents across the industry including multimodal functionality, tool-calling, and more</li>\n<li>Have experience gathering business requirements and translating them into technical solutions</li>\n<li>Meet regularly with customer teams onsite and virtually, collaborating cross-functionally with all teams responsible for their data and ML needs</li>\n<li>Push production code in multiple development environments, writing and debugging code directly in both our customer&#39;s and Scale&#39;s codebases.</li>\n</ul>\n<p>Ideal candidate will have a love for solving deeply complex technical problems with ambiguity using state of the art research and AI to accomplish your client&#39;s business goals, a strong engineering background, deep familiarity with a data-driven approach when iterating on machine learning models, and experience working with cloud technology stack and developing machine learning models in a cloud environment.</p>\n<p>Nice to have: strong knowledge of software engineering best practices, experience building applications taking advantage of Generative AI in real, production use cases, and familiarity with state of the art LLMs and their strengths/weaknesses.</p>\n<p 
style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_45cde3e1-29d","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Scale","sameAs":"https://scale.com/","logo":"https://logos.yubhub.co/scale.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/scaleai/jobs/4514173005","x-work-arrangement":"hybrid","x-experience-level":"mid","x-job-type":"full-time","x-salary-range":"$216,000-$270,000 USD","x-skills-required":["Python","Machine Learning","Cloud Technology Stack","Data-Driven Approach","Software Engineering Best Practices"],"x-skills-preferred":["Generative AI","State of the Art LLMs","Multimodal Functionality","Tool-Calling"],"datePosted":"2026-04-18T15:56:38.201Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"San Francisco, CA; New York, NY"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Python, Machine Learning, Cloud Technology Stack, Data-Driven Approach, Software Engineering Best Practices, Generative AI, State of the Art LLMs, Multimodal Functionality, Tool-Calling","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":216000,"maxValue":270000,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_5ceb4835-0f1"},"title":"Manager, Professional Services","description":"<p>As a Manager, Professional Services, you will work with clients on short to medium-term customer engagements on their big data challenges using the Databricks platform. 
You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers get the most value out of their data.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>You will work on a variety of impactful customer technical big data projects which may include building reference architectures, how-to&#39;s, and production-grade MVPs.</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build, and deployment of industry-leading big data and AI applications.</li>\n<li>Consult on architecture and design; bootstrap or implement strategic customer projects which lead to a customer&#39;s successful understanding, evaluation, and adoption of Databricks.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement-specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>10+ years of experience with Big Data Technologies such as Apache Spark, Kafka, Cloud Native, and Data Lakes in a customer-facing post-sales, technical architecture, or consulting role.</li>\n<li>4+ years of people management experience, managing a team of Data Engineers, Data Architects, etc.</li>\n<li>6+ years of experience working on Big Data Architectures independently.</li>\n<li>Experience working across Cloud Platforms (GCP/AWS/Azure).</li>\n<li>Experience working on Databricks platform is a plus.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Willingness to travel for onsite customer engagements within India.</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_5ceb4835-0f1","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8503068002","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["Apache Spark","Kafka","Cloud Native","Data Lakes","Big Data Technologies","Data Engineering","Data Science","Cloud Technology","People Management","Team Leadership"],"x-skills-preferred":["Databricks","GCP","AWS","Azure","Documentation","White-boarding"],"datePosted":"2026-04-18T15:56:03.190Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Remote - India"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Apache Spark, Kafka, Cloud Native, Data Lakes, Big Data Technologies, Data Engineering, Data Science, Cloud Technology, People Management, Team Leadership, Databricks, GCP, AWS, Azure, Documentation, White-boarding"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_d1ee6aec-ec7"},"title":"Applied AI Engineer, Enterprise","description":"<p>We&#39;re looking for an Applied AI Engineer to join our Enterprise Engineering team. As an Applied AI Engineer, you&#39;ll work with clients to create ML solutions to satisfy their business needs. 
Your work will range from building next-generation AI cybersecurity firewalls to creating transformative AI experiences in journalism to applying foundation genomic models making predictions about life-saving drug proteins.</p>\n<p>Daily data-driven experiments will provide key insights around model strengths and inefficiencies which you&#39;ll use to improve your product&#39;s performance. You&#39;ll own, plan, and optimize the AI behind our Enterprise customer&#39;s deepest technical problems, leveraging our Scale Generative Platform (SGP) to build the most advanced AI agents across the industry.</p>\n<p>Responsibilities:</p>\n<ul>\n<li>Own, plan, and optimize the AI behind our Enterprise customer&#39;s deepest technical problems</li>\n<li>Leverage SGP to build the most advanced AI agents across the industry, including multimodal functionality, tool-calling, and more</li>\n<li>Have experience gathering business requirements and translating them into technical solutions</li>\n<li>Meet regularly with customer teams onsite and virtually, collaborating cross-functionally with all teams responsible for their data and ML needs</li>\n<li>Push production code in multiple development environments, writing and debugging code directly in both our customer&#39;s and Scale&#39;s codebases</li>\n</ul>\n<p>Ideally, you&#39;d have:</p>\n<ul>\n<li>A love for solving deeply complex technical problems with ambiguity using state of the art research and AI to accomplish your client&#39;s business goals</li>\n<li>Strong engineering background: a Bachelor’s degree in Computer Science, Mathematics, or another quantitative field or equivalent strong engineering background</li>\n<li>Deep familiarity with a data-driven approach when iterating on machine learning models and how changes in datasets can influence model results</li>\n<li>Experience working with cloud technology stack (eg. 
AWS or GCP) and developing machine learning models in a cloud environment</li>\n<li>Proficiency in Python to write, test and debug code using common libraries (ie numpy, pandas)</li>\n</ul>\n<p>Nice to haves:</p>\n<ul>\n<li>Strong knowledge of software engineering best practices</li>\n<li>Have built applications taking advantage of Generative AI in real, production use cases</li>\n<li>Familiarity with state of the art LLMs and their strengths/weaknesses</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_d1ee6aec-ec7","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Scale","sameAs":"https://scale.com/","logo":"https://logos.yubhub.co/scale.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/scaleai/jobs/4536659005","x-work-arrangement":"onsite","x-experience-level":"mid","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["Python","Machine Learning","Cloud Technology Stack","Data-Driven Approach","Software Engineering Best Practices"],"x-skills-preferred":["Generative AI","State of the Art LLMs"],"datePosted":"2026-04-18T15:55:42.415Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"London, UK"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Python, Machine Learning, Cloud Technology Stack, Data-Driven Approach, Software Engineering Best Practices, Generative AI, State of the Art LLMs"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_0a7cad02-cd5"},"title":"Resident Solutions Architect - Manufacturing","description":"<p>As a Resident Solutions Architect (RSA) on our Professional Services team, you will work with customers on short to medium term customer engagements on their big data challenges using the Databricks 
platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Handle a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues</li>\n</ul>\n<ul>\n<li>Collaborate with the Databricks Technical, Project Manager, Architect and Customer teams to ensure the technical components of the engagement are delivered to meet customer&#39;s needs</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep 
experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects</li>\n</ul>\n<ul>\n<li>Ability to travel up to 30% when needed</li>\n</ul>\n<p>Pay Range Transparency Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipated utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_0a7cad02-cd5","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8494155002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:55:20.115Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Philadelphia, Pennsylvania"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_fc79e6e5-5c0"},"title":"Resident Solutions Architect - Manufacturing","description":"<p>As a Resident Solutions Architect (RSA) on our Professional Services team, you will work with customers on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>The impact you will 
have:</p>\n<ul>\n<li>Handle a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues</li>\n</ul>\n<ul>\n<li>Collaborate with the Databricks Technical, Project Manager, Architect and Customer teams to ensure the technical components of the engagement are delivered to meet customer&#39;s needs</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - 
managing scope and timelines</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects</li>\n</ul>\n<ul>\n<li>Ability to travel up to 30% when needed</li>\n</ul>\n<p>Pay Range Transparency Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles. Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location. Based on the factors above, Databricks anticipated utilizing the full width of the range. The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_fc79e6e5-5c0","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8494156002","x-work-arrangement":"hybrid","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 
USD","x-skills-required":["Python","Scala","Cloud ecosystems (AWS, Azure, GCP)","Apache Spark","CI/CD for production deployments","MLOps","Data engineering","Data science","Cloud technology"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:54:34.838Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Seattle, Washington"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"Python, Scala, Cloud ecosystems (AWS, Azure, GCP), Apache Spark, CI/CD for production deployments, MLOps, Data engineering, Data science, Cloud technology","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_1e3127c0-24c"},"title":"Platinum Support Representative, Tier 2","description":"<p>As a member of the Platinum Support team at Dialpad, you will deliver amazing service and support to our users by providing fast and accurate responses in a courteous and professional manner. You will handle user and partner inquiries ranging from simple product questions to more complex technical support issues. You will troubleshoot customer issues, escalate bug reports, and work to drive issue resolution. You will work effectively with a variety of internal teams, including Sales, Engineering, and Product Management. You will create and maintain tickets with our engineering team at a high technical level. You will monitor all live channels (chat, phone and web form) as you are scheduled to do so. You will communicate with Dialpad partners quickly and effectively in a professional manner. You will attend any and all advanced trainings to become an expert in our products and service. You will adhere to all policies and procedures set forth by Dialpad and the Director/Manager of the Platinum Support team. 
You will strive to be a team player and maintain the set SLA (Service Level Agreement) for each partner, Platinum support customer, general support interaction. You will maintain the required Quality Assurance score for the Platinum Support team. You will maintain the targeting number of tickets completed every week and strive for one-touch resolve.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_1e3127c0-24c","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Dialpad","sameAs":"https://dialpad.com","logo":"https://logos.yubhub.co/dialpad.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/dialpad/jobs/8483991002","x-work-arrangement":"remote","x-experience-level":"mid","x-job-type":"full-time","x-salary-range":"$57,000-$72,000 CAD","x-skills-required":["College degree","Minimum of 5 years in customer support","Strong English skills","Good home computer and internet connectivity","Technical degree"],"x-skills-preferred":["Technical experience/knowledge surrounding LAN/WAN, Cloud technology and VoIP","Experience working in a call center environment"],"datePosted":"2026-04-18T15:52:34.327Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Kitchener, Canada"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Customer Support","industry":"Technology","skills":"College degree, Minimum of 5 years in customer support, Strong English skills, Good home computer and internet connectivity, Technical degree, Technical experience/knowledge surrounding LAN/WAN, Cloud technology and VoIP, Experience working in a call center 
environment","baseSalary":{"@type":"MonetaryAmount","currency":"CAD","value":{"@type":"QuantitativeValue","minValue":57000,"maxValue":72000,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_c515a83d-ecc"},"title":"Strategic Core Account Executive - Discrete Manufacturing","description":"<p>We are looking for a strategic and consultative Strategic Account Executive to join the team in Germany and maximise the significant market opportunity that exists for Databricks within the industrial and manufacturing sector.</p>\n<p>As a primary driver of &quot;Industrie 4.0,&quot; this organisation is a cornerstone of the German economy, transforming how the world manufactures, moves, and heals. Dual-headquartered in Munich and Berlin, this powerhouse employs nearly 300,000 people across a vast multi-divisional ecosystem.</p>\n<p>The impact you&#39;ll have:</p>\n<ul>\n<li>You will be part of the large account team for this flagship account, driving sustained growth across consumption, expansion, and new business.</li>\n</ul>\n<ul>\n<li>You will consistently exceed growth targets by translating account strategy into clear, measurable commercial outcomes and disciplined execution.</li>\n</ul>\n<ul>\n<li>You will operate as a trusted C-suite advisor, influencing executive decisions and shaping enterprise-wide Data &amp; AI transformation initiatives.</li>\n</ul>\n<ul>\n<li>You will identify, prioritise, and scale high-value AI use cases to deliver measurable business outcomes.</li>\n</ul>\n<ul>\n<li>You will drive partner-led growth, working closely with system integrators and strategic partners to increase deal momentum and customer impact.</li>\n</ul>\n<ul>\n<li>You will lead complex, multi-stakeholder negotiations, closing transformational agreements that strengthen the strategic partnership.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>Proven success selling advanced data, 
analytics, Big Data, AI, or complex cloud technology, closing complex, multi-stakeholder, multi-year enterprise agreements.</li>\n</ul>\n<ul>\n<li>A proven record of exceeding ambitious revenue goals in large, global enterprise accounts within the Industrial/Manufacturing vertical in Germany.</li>\n</ul>\n<ul>\n<li>Deep understanding of consumption-based growth models and how to scale strategic accounts from initial wins.</li>\n</ul>\n<ul>\n<li>Proficiency in structured sales methodologies (e.g., MEDDPICC, Value Selling).</li>\n</ul>\n<ul>\n<li>A history of building champion networks and leading cross-functional account teams around a clear strategy.</li>\n</ul>\n<ul>\n<li>Candidates with a consulting background who combine a strong sales profile with deep Industrial vertical expertise are encouraged to apply.</li>\n</ul>\n<ul>\n<li>Readiness to travel regularly within Germany and internationally to stay closely aligned with the customer.</li>\n</ul>\n<ul>\n<li>Fluency in German and English, with the gravitas to influence senior leaders up to C-level.</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_c515a83d-ecc","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8496220002","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["advanced data","analytics","Big Data","AI","complex cloud technology","structured sales methodologies","MEDDPICC","Value Selling"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:51:23.401Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Berlin, 
Germany"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Sales","industry":"Manufacturing","skills":"advanced data, analytics, Big Data, AI, complex cloud technology, structured sales methodologies, MEDDPICC, Value Selling"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_ffd169d9-40b"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>You will work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational 
issues.</li>\n</ul>\n<ul>\n<li>You will work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills.</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts.</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n</ul>\n<ul>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Pay Range Transparency Databricks is committed to fair and equitable compensation practices. 
The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipated utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_ffd169d9-40b","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461239002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","data platforms & analytics","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:49:46.649Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Atlanta, Georgia"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache 
Spark, CI/CD, MLOps, data platforms & analytics, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_bac99a46-7f5"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>You will work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues.</li>\n</ul>\n<ul>\n<li>You will work 
with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills.</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts.</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n</ul>\n<ul>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles. 
Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location. Based on the factors above, Databricks anticipated utilizing the full width of the range. The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above. For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_bac99a46-7f5","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461243002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:49:01.745Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Denver, Colorado"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, 
GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_34b561b0-b37"},"title":"Director, Enterprise - Retail & CPG","description":"<p>We are looking for a Sales Director, Strategic Accounts to join our growing business in Germany. As a Sales Director, you will lead a team of Strategic Account Executives across the Retail and CPG verticals, mentoring, guiding, and empowering them to achieve and exceed their goals.</p>\n<p>Your primary focus will be on strategic account expansion, driving growth by expanding relationships with our most important customers. You will strengthen and scale the team through high-impact hiring, hands-on coaching, and by fostering a culture built on collaboration, accountability, and results.</p>\n<p>Key responsibilities include:</p>\n<ul>\n<li>Leveraging your business network to build a strong talent pipeline and hire top candidates as the team grows</li>\n<li>Creating a clear regional growth and investment plan within your first 90 days</li>\n<li>Building and sponsoring trusted relationships with customers and partners to drive long-term success in the region</li>\n<li>Ensuring accurate forecasting and creating a predictable, high-growth business</li>\n<li>Coaching your team to lead with a strong vision setting, methodology-based selling, and staying aligned to our customers&#39; goals and outcomes</li>\n<li>Developing a solid understanding of our product&#39;s technical details and roadmap to earn trust with key stakeholders</li>\n</ul>\n<p>We are looking for a proven people leader with 7+ years of experience leading high-performing Enterprise sales teams that sell into strategic global accounts in Germany. 
You should have a proven track record of developing high-performing teams in similar high-growth, Data, AI, Cloud, or SaaS/Tech companies, consistently exceeding ambitious sales goals.</p>\n<p>The ideal candidate will have extensive knowledge of the Retail and CPG vertical, and proven relationships within these accounts. You should know how to spot and grow great talent, building teams that raise the bar through trust, accountability, and shared success.</p>\n<p>Fluent in German and English is essential, and willing and able to travel to customer sites on a regular basis is required, as well as some international travel for internal meetings/events.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_34b561b0-b37","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8287373002","x-work-arrangement":"onsite","x-experience-level":"executive","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["Sales leadership","Strategic account management","Team management","Forecasting","Methodology-based selling","Data analysis","Cloud technology","SaaS/Tech","German language skills"],"x-skills-preferred":["Customer relationship management","Business development","Market research","Competitor analysis","Sales strategy development"],"datePosted":"2026-04-18T15:48:50.760Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Hesse, Germany"}},"employmentType":"FULL_TIME","occupationalCategory":"Sales","industry":"Technology","skills":"Sales leadership, Strategic account management, Team management, Forecasting, Methodology-based selling, Data analysis, Cloud technology, SaaS/Tech, German language skills, Customer relationship management, 
Business development, Market research, Competitor analysis, Sales strategy development"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_219928ef-6de"},"title":"Resident Solutions Architect - Healthcare & Life Sciences","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>You will work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues.</li>\n</ul>\n<ul>\n<li>You will work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet 
customer&#39;s needs.</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills.</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts.</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n</ul>\n<ul>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles. Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location. Based on the factors above, Databricks anticipates utilizing the full width of the range. 
The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_219928ef-6de","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8494148002","x-work-arrangement":"hybrid","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","performant end-to-end data architectures","technical project delivery","documentation and white-boarding skills","client management"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:46:13.228Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Philadelphia, Pennsylvania"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, performant end-to-end data architectures, technical project delivery, documentation and white-boarding skills, client 
management","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_f18e7306-00c"},"title":"Resident Solutions Architect - Financial Services","description":"<p>As a Senior Big Data Solutions Architect (Sr Resident Solutions Architect) in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n<li>Consult on architecture and design; bootstrap hands-on projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to 
meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>9+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark and knowledge of Apache Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Capable of design and deployment of highly performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Experience in building scalable streaming and batch solutions using cloud-native components</li>\n<li>Travel to customers up to 20% of the time</li>\n</ul>\n<p>Nice to have:</p>\n<ul>\n<li>Databricks Certification</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_f18e7306-00c","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461325002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","Databricks","CI/CD","MLOps","technical project 
delivery","documentation","white-boarding","client management","conflict management","scalable streaming","batch solutions","cloud-native components"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:45:17.488Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Philadelphia, Pennsylvania"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, Databricks, CI/CD, MLOps, technical project delivery, documentation, white-boarding, client management, conflict management, scalable streaming, batch solutions, cloud-native components","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_61b49b86-6c8"},"title":"Resident Solutions Architect - Manufacturing","description":"<p>As a Resident Solutions Architect (RSA) on our Professional Services team, you will work with customers on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Handle a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic 
customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues</li>\n</ul>\n<ul>\n<li>Collaborate with the Databricks Technical, Project Manager, Architect and Customer teams to ensure the technical components of the engagement are delivered to meet customer&#39;s needs</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues</li>\n</ul>\n<p>You will report to the regional Manager/Lead.</p>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer 
projects</li>\n</ul>\n<ul>\n<li>Ability to travel up to 30% when needed</li>\n</ul>\n<p>Pay Range Transparency Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipates utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_61b49b86-6c8","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8341313002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:44:54.724Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"New 
York City, New York"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_6ea8bf6b-ef6"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of 
Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. 
The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipates utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_6ea8bf6b-ef6","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8494153002","x-work-arrangement":"hybrid","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:43:39.634Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Philadelphia, Pennsylvania"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, 
Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_62b2a5a2-9bd"},"title":"Big Data Solutions Architect (Professional Services)","description":"<p>As a Big Data Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>Key responsibilities include:</p>\n<ul>\n<li>Working on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Working with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guiding strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consulting on architecture and design; bootstrapping or implementing customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks</li>\n</ul>\n<ul>\n<li>Providing an escalated level of support for customer operational issues</li>\n</ul>\n<ul>\n<li>Working with the Databricks technical team, Project Manager, Architect and 
Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs</li>\n</ul>\n<ul>\n<li>Working with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Strong expertise in data warehousing concepts, architecture, and migration strategies</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python, Pyspark or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects</li>\n</ul>\n<ul>\n<li>Data Science expertise is a nice-to-have</li>\n</ul>\n<ul>\n<li>Travel to customers 10-20% of the time</li>\n</ul>\n<ul>\n<li>Databricks Certification</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_62b2a5a2-9bd","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8482697002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","data warehousing","migration strategies","Python","Pyspark","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:43:16.680Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Paris, France"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, data warehousing, migration strategies, Python, Pyspark, Scala, AWS, Azure, GCP"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_9d5fcc78-b2b"},"title":"Resident Solutions Architect - Public Sector","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>You will work on a variety of impactful customer technical projects which 
may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues.</li>\n</ul>\n<ul>\n<li>You will work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n</ul>\n<ul>\n<li>Documentation and 
white-boarding skills.</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts.</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n</ul>\n<ul>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification Pay Range Transparency Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles. Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location. Based on the factors above, Databricks anticipates utilizing the full width of the range. The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above. 
For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_9d5fcc78-b2b","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8423296002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","performant end-to-end data architectures","technical project delivery","documentation and white-boarding skills","client management"],"x-skills-preferred":["Python","Scala","AWS","Azure","GCP","distributed computing","Spark runtime internals"],"datePosted":"2026-04-18T15:42:27.646Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Central - United States; Northeast - United States; Southeast - United States"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, performant end-to-end data architectures, technical project delivery, documentation and white-boarding skills, client management, Python, Scala, AWS, Azure, GCP, distributed computing, Spark runtime 
internals","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_8131cff5-1a9"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>You will work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>You will work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement 
are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles. Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location. Based on the factors above, Databricks anticipates utilizing the full width of the range. 
The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above. For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD Zone 2 Pay Range $180,656-$248,360 USD Zone 3 Pay Range $180,656-$248,360 USD Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_8131cff5-1a9","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8341311002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:42:15.014Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"United States"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_b647b7da-f8f"},"title":"Resident Solutions Architect - Public Sector","description":"<p>As a Resident Solutions Architect (RSA) on our Professional Services team, you will work with customers on short to 
medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Handle a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues</li>\n</ul>\n<ul>\n<li>Collaborate with the Databricks Technical, Project Manager, Architect and Customer teams to ensure the technical components of the engagement are delivered to meet customer&#39;s needs</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>US Top Secret Clearance Required for this position</li>\n</ul>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; 
analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects</li>\n</ul>\n<ul>\n<li>Bachelor&#39;s degree in Computer Science, Information Systems, Engineering, or equivalent experience through work experience</li>\n</ul>\n<ul>\n<li>Ability to travel up to 30% when needed</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_b647b7da-f8f","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8494107002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed 
computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:42:08.402Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Virginia"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_8d1ca2f5-7be"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI 
applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. 
The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipates utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_8d1ca2f5-7be","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461220002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:42:04.881Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Chicago, Illinois"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, 
CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_6860353a-782"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the 
technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. 
The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipates utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_6860353a-782","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461241002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","big data","AI"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:41:53.366Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Washington, D.C."}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, 
distributed computing, big data, AI","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_eb3ba652-daa"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the 
engagement are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. 
The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipates utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_eb3ba652-daa","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461163002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:41:52.535Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"San Francisco, California"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, 
Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_c7179545-496"},"title":"Resident Solutions Architect (Professional Services)","description":"<p>We&#39;re hiring for multiple roles within our Professional Services team. As a Resident Solutions Architect, you will work with clients on short to medium-term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues</li>\n</ul>\n<ul>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s 
needs</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>Extensive experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Design and deployment of performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts</li>\n</ul>\n<ul>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects</li>\n</ul>\n<ul>\n<li>Travel to customers 10% of the time</li>\n</ul>\n<ul>\n<li>[Preferred] Databricks Certification but not essential</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_c7179545-496","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8367942002","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","technical project delivery","documentation","white-boarding"],"x-skills-preferred":["Databricks 
Certification"],"datePosted":"2026-04-18T15:41:31.345Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"London, United Kingdom"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, technical project delivery, documentation, white-boarding, Databricks Certification"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_5a6826b9-345"},"title":"Account Executive, Singapore","description":"<p>We are seeking a creative, driven, and execution-oriented Enterprise Account Executive to sell and maximise the huge market opportunity that exists for Databricks today.</p>\n<p>As an Enterprise Account Executive reporting to the Regional Sales Director, you will have experience selling in the Enterprise segment. Your informed point of view on Big Data and Advanced Analytics will guide your successful sales strategy together with our teams and partners, allowing you to provide value to our biggest and most valued customers.</p>\n<p>Key responsibilities include:</p>\n<ul>\n<li>Evangelising Databricks&#39; Unified Analytics Platform powered by Spark and launching the Databricks brand in Enterprise Accounts across all industries</li>\n<li>Prospecting, identifying and sourcing new sales opportunities, building pipeline individually and with the Databricks SDR team</li>\n<li>Engaging with business and technical decision-makers and leading them through the evaluation and buying process</li>\n<li>Exceeding individual activity, pipeline, and annual revenue targets</li>\n<li>Engaging with and driving business through local partners (technology partners, ISVs, SIs, and GSIs)</li>\n<li>Driving customer success and upselling existing customers</li>\n<li>Creating a Territory Strategy across all industries</li>\n</ul>\n<p>Requirements 
include:</p>\n<ul>\n<li>5+ years of experience selling SaaS solutions to Enterprise customers in ASEAN region</li>\n<li>Proven success in Enterprise Sales roles, ideally in big data, Cloud, or SaaS technology</li>\n<li>Demonstrable experience in selling innovation, ideally in big data, Cloud, or SaaS technology</li>\n<li>Solution and business-outcomes-focused sales approach</li>\n<li>Ability to simply articulate intricate cloud &amp; big data technologies and their business value for the customer</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_5a6826b9-345","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/7726495002","x-work-arrangement":"onsite","x-experience-level":"executive","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["SaaS solutions","Enterprise sales","Big data","Cloud technology","Sales strategy","Customer success","Territory strategy"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:41:29.168Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Singapore"}},"employmentType":"FULL_TIME","occupationalCategory":"Sales","industry":"Technology","skills":"SaaS solutions, Enterprise sales, Big data, Cloud technology, Sales strategy, Customer success, Territory strategy"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_3827f936-fc2"},"title":"Resident Solutions Architect - Financial Services","description":"<p>Job Title: Resident Solutions Architect - Financial Services</p>\n<p>As a Senior Big Data Solutions Architect (Sr Resident Solutions Architect) in our Professional Services team, you will work with clients on short 
to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n</ul>\n<ul>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n</ul>\n<ul>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n</ul>\n<ul>\n<li>Consult on architecture and design; bootstrap hands-on projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n</ul>\n<ul>\n<li>Provide an escalated level of support for customer operational issues.</li>\n</ul>\n<ul>\n<li>You will work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n</ul>\n<ul>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>9+ years experience in data engineering, data platforms &amp; analytics</li>\n</ul>\n<ul>\n<li>Comfortable writing code in either Python or 
Scala</li>\n</ul>\n<ul>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n</ul>\n<ul>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Apache Spark™ runtime internals</li>\n</ul>\n<ul>\n<li>Familiarity with CI/CD for production deployments</li>\n</ul>\n<ul>\n<li>Working knowledge of MLOps</li>\n</ul>\n<ul>\n<li>Capable of design and deployment of highly performant end-to-end data architectures</li>\n</ul>\n<ul>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n</ul>\n<ul>\n<li>Documentation and white-boarding skills.</li>\n</ul>\n<ul>\n<li>Experience working with clients and managing conflicts.</li>\n</ul>\n<ul>\n<li>Experience in building scalable streaming and batch solutions using cloud-native components</li>\n</ul>\n<ul>\n<li>Travel to customers up to 20% of the time</li>\n</ul>\n<p>Nice to have: Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. 
The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipated utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p>About Databricks</p>\n<p>Databricks is the data and AI company. 
More than 10,000 organizations worldwide, including Comcast, Condé Nast, Grammarly, and over 50% of the Fortune 500, rely on the Databricks Data Intelligence Platform to unify and democratize data, analytics and AI.</p>\n<p>Databricks is headquartered in San Francisco, with offices around the globe and was founded by the original creators of Lakehouse, Apache Spark™, Delta Lake and MLflow.</p>\n<p>To learn more, follow Databricks on Twitter, LinkedIn and Facebook.</p>\n<p>Benefits</p>\n<p>At Databricks, we strive to provide comprehensive benefits and perks that meet the needs of all of our employees.</p>\n<p>For specific details on the benefits offered in your region click here.</p>\n<p>Our Commitment to Diversity and Inclusion</p>\n<p>At Databricks, we are committed to fostering a diverse and inclusive culture where everyone can excel.</p>\n<p>We take great care to ensure that our hiring practices are inclusive and meet equal employment opportunity standards.</p>\n<p>Individuals looking for employment at Databricks are considered without regard to age, color, disability, ethnicity, family or marital status, gender identity or expression, language, national origin, physical and mental ability, political affiliation, race, religion, sexual orientation, socio-economic status, veteran status, and other protected characteristics.</p>\n<p>Compliance</p>\n<p>If access to export-controlled technology or source code is required for performance of job duties, it is within Employer&#39;s discretion whether to apply for a U.S. 
government license for such positions, and Employer may decline to proceed with an applicant on this basis alone.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_3827f936-fc2","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461326002","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","Python","Scala","Cloud ecosystems","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:40:59.293Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"New York City, New York"}},"jobLocationType":"TELECOMMUTE","employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, Cloud ecosystems, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_9223ca6d-d9e"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects 
which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production 
deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipated utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a 
href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_9223ca6d-d9e","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461193002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","Python","Scala","CI/CD","MLOps","distributed computing"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:40:33.675Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Seattle, Washington"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, Python, Scala, CI/CD, MLOps, distributed computing","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_8d65cea1-fd1"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional 
Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and 
managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipated utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_8d65cea1-fd1","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461219002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed 
computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:40:01.213Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Austin, Texas"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_507bea17-ad7"},"title":"Resident Solutions Architect - Communications, Media, Entertainment & Games","description":"<p>As a Resident Solutions Architect in our Professional Services team, you will work with clients on short to medium term customer engagements on their big data challenges using the Databricks platform.</p>\n<p>You will provide data engineering, data science, and cloud technology projects which require integrating with client systems, training, and other technical tasks to help customers to get most value out of their data.</p>\n<p>RSAs are billable and know how to complete projects according to specification with excellent customer service.</p>\n<p>You will report to the regional Manager/Lead.</p>\n<p>The impact you will have:</p>\n<ul>\n<li>Work on a variety of impactful customer technical projects which may include designing and building reference architectures, creating how-to&#39;s and productionalizing customer use cases</li>\n<li>Work with engagement managers to scope variety of professional services work with input from the customer</li>\n<li>Guide strategic customers as they implement transformational big data projects, 3rd party migrations, including end-to-end design, build and deployment of industry-leading big data and AI 
applications</li>\n<li>Consult on architecture and design; bootstrap or implement customer projects which leads to a customers&#39; successful understanding, evaluation and adoption of Databricks.</li>\n<li>Provide an escalated level of support for customer operational issues.</li>\n<li>Work with the Databricks technical team, Project Manager, Architect and Customer team to ensure the technical components of the engagement are delivered to meet customer&#39;s needs.</li>\n<li>Work with Engineering and Databricks Customer Support to provide product and implementation feedback and to guide rapid resolution for engagement specific product and support issues.</li>\n</ul>\n<p>What we look for:</p>\n<ul>\n<li>6+ years experience in data engineering, data platforms &amp; analytics</li>\n<li>Comfortable writing code in either Python or Scala</li>\n<li>Working knowledge of two or more common Cloud ecosystems (AWS, Azure, GCP) with expertise in at least one</li>\n<li>Deep experience with distributed computing with Apache Spark™ and knowledge of Spark runtime internals</li>\n<li>Familiarity with CI/CD for production deployments</li>\n<li>Working knowledge of MLOps</li>\n<li>Design and deployment of performant end-to-end data architectures</li>\n<li>Experience with technical project delivery - managing scope and timelines.</li>\n<li>Documentation and white-boarding skills.</li>\n<li>Experience working with clients and managing conflicts.</li>\n<li>Build skills in technical areas which support the deployment and integration of Databricks-based solutions to complete customer projects.</li>\n<li>Travel to customers 20% of the time</li>\n</ul>\n<p>Databricks Certification</p>\n<p>Pay Range Transparency</p>\n<p>Databricks is committed to fair and equitable compensation practices. 
The pay range(s) for this role is listed below and represents the expected base salary range for non-commissionable roles or on-target earnings for commissionable roles.</p>\n<p>Actual compensation packages are based on several factors that are unique to each candidate, including but not limited to job-related skills, depth of experience, relevant certifications and training, and specific work location.</p>\n<p>Based on the factors above, Databricks anticipated utilizing the full width of the range.</p>\n<p>The total compensation package for this position may also include eligibility for annual performance bonus, equity, and the benefits listed above.</p>\n<p>For more information regarding which range your location is in visit our page here.</p>\n<p>Zone 1 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 2 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 3 Pay Range $180,656-$248,360 USD</p>\n<p>Zone 4 Pay Range $180,656-$248,360 USD</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_507bea17-ad7","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Databricks","sameAs":"https://databricks.com","logo":"https://logos.yubhub.co/databricks.com.png"},"x-apply-url":"https://job-boards.greenhouse.io/databricks/jobs/8461251002","x-work-arrangement":"onsite","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$180,656-$248,360 USD","x-skills-required":["data engineering","data science","cloud technology","Apache Spark","CI/CD","MLOps","distributed computing","Python","Scala","AWS","Azure","GCP"],"x-skills-preferred":[],"datePosted":"2026-04-18T15:39:19.614Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Mountain View, California"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Technology","skills":"data engineering, data science, cloud technology, 
Apache Spark, CI/CD, MLOps, distributed computing, Python, Scala, AWS, Azure, GCP","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":180656,"maxValue":248360,"unitText":"YEAR"}}},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_4eb8e004-e19"},"title":"Java Engineer, Associate","description":"<p>At BlackRock, we&#39;re looking for a Java Engineer to join our team. As a Java Engineer, you&#39;ll be working on our investment operating system Aladdin, which is used by financial institutions worldwide. You&#39;ll be part of the Aladdin Engineering team, responsible for building the next generation of technology that changes the way information, people, and technology intersect for global investment firms.</p>\n<p>Responsibilities:</p>\n<ul>\n<li>Take ownership of individual project priorities, deadlines, and deliverables using AGILE methodologies.</li>\n<li>Deliver high-efficiency, high-availability, concurrent, and fault-tolerant software systems.</li>\n<li>Contribute to the development of Aladdin&#39;s global, multi-asset trading platform.</li>\n<li>Design and develop innovative solutions to complex problems, identifying issues and roadblocks.</li>\n<li>Demonstrate vision when brainstorming solutions for team productivity, efficiency, guiding, and motivating developers.</li>\n</ul>\n<p>Qualifications:</p>\n<ul>\n<li>Master&#39;s Degree or PhD in Computer Science, Engineering, or Mathematics.</li>\n<li>Hands-on experience in Java or web development (JavaScript).</li>\n<li>Good understanding of concurrent programming and design of high-throughput, high-availability, fault-tolerant distributed applications and databases.</li>\n<li>Strong interest in distributed systems, infrastructure services, cloud technology, and AI/ML techniques and technology.</li>\n<li>Prior experience in building distributed applications using SQL and/or NoSQL technologies 
such as MSSQL, MongoDB, Snowflake, or Redis is a plus.</li>\n<li>Prior experience with message broker technology such as Kafka or gRPC is a plus.</li>\n<li>Prior experience in modern front-end frameworks such as React, Vue.js, or Angular is a plus. Angular preferred.</li>\n<li>Excellent analytical and software architecture design skills, with an emphasis on test-driven development.</li>\n<li>Effective communication and presentation skills, both written and verbal</li>\n</ul>\n<p>Our benefits include retirement investment and tools designed to help you build a sound financial future, access to education reimbursement, comprehensive resources to support your physical health and emotional well-being, family support programs, and Flexible Time Off (FTO) so you can relax, recharge, and be there for the people you care about.</p>\n<p>Our hybrid work model is designed to enable a culture of collaboration and apprenticeship that enriches the experience of our employees, while supporting flexibility for all. Employees are currently required to work at least 4 days in the office per week, with the flexibility to work from home 1 day a week. 
Some business groups may require more time in the office due to their roles and responsibilities.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_4eb8e004-e19","directApply":true,"hiringOrganization":{"@type":"Organization","name":"BlackRock","sameAs":"https://www.blackrock.com","logo":"https://logos.yubhub.co/blackrock.com.png"},"x-apply-url":"https://jobs.workable.com/view/u3TPbEc65jhur5x3zbCSL7/java-engineer%2C-associate-in-edinburgh-at-blackrock","x-work-arrangement":"hybrid","x-experience-level":"mid","x-job-type":"full-time","x-salary-range":null,"x-skills-required":["Java","web development","concurrent programming","distributed systems","infrastructure services","cloud technology","AI/ML techniques and technology","SQL","NoSQL technologies","message broker technology","modern front-end frameworks"],"x-skills-preferred":["Angular","React","Vue.js"],"datePosted":"2026-03-09T16:43:39.038Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Edinburgh, Scotland"}},"employmentType":"FULL_TIME","occupationalCategory":"Engineering","industry":"Finance","skills":"Java, web development, concurrent programming, distributed systems, infrastructure services, cloud technology, AI/ML techniques and technology, SQL, NoSQL technologies, message broker technology, modern front-end frameworks, Angular, React, Vue.js"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_d55c291a-1f1"},"title":"Subtitle Translator, QCer, SDH Linguist, Template Linguist","description":"<p><strong>Join the Keywords Studios Talent Community – Subtitle Translation/Localization Experts</strong></p>\n<p>We&#39;re a global network of subtitle and localization specialists, partnering with iconic developers, publishers, and content creators. 
If you&#39;re a talented professional with expertise in subtitling, we&#39;d love to connect with you.</p>\n<p><strong>We&#39;re Looking for Freelance Subtitle Translation Opportunities</strong></p>\n<p>We&#39;re always on the lookout for talented professionals to join our network of subtitle and localization specialists. If you have experience in subtitling, QC, SDH, or template linguistics, we&#39;d love to hear from you.</p>\n<p><strong>Requirements</strong></p>\n<ul>\n<li>Native proficiency of the target language</li>\n<li>Strong command of the source language</li>\n<li>Experience in at least one of the following areas of the entertainment industry: localization QC, audiovisual translation and subtitling</li>\n<li>Experience with subtitle editing software and web/cloud technology</li>\n<li>Deep understanding of closed captioning and subtitling, and their common failures and technical challenges</li>\n<li>Solid understanding of nuances of subtitle and dub translations</li>\n<li>Working knowledge of cultural differences and best practices for subtitles and dub audio creation</li>\n<li>University degree or equivalent professional experience in the translation field</li>\n<li>Ability to quickly adapt to workflow/process changes and updates</li>\n<li>Great attention to detail, organization, problem-solving, analytical and multitasking skills</li>\n</ul>\n<p><strong>Benefits</strong></p>\n<ul>\n<li>Competitive pay</li>\n<li>Work on popular titles across film, TV, streaming, games and much more</li>\n<li>Early access to unreleased content</li>\n<li>Flexible project volume</li>\n<li>100% remote work</li>\n<li>Set your own schedule</li>\n<li>Constructive feedback and support</li>\n</ul>\n<p><strong>Please Note</strong></p>\n<ul>\n<li>Signing a Non-Disclosure Agreement (NDA) is required prior to starting the recruitment process</li>\n<li>Due to the high volume of applications, we regret that we are only able to respond to candidates who meet the above 
requirements</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_d55c291a-1f1","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Keywords Studios","sameAs":"https://apply.workable.com","logo":"https://logos.yubhub.co/j.com.png"},"x-apply-url":"https://apply.workable.com/j/A08655B082","x-work-arrangement":"remote","x-experience-level":"entry|mid|senior","x-job-type":"freelance","x-salary-range":null,"x-skills-required":["Subtitle editing software","Web/cloud technology","Closed captioning and subtitling","Subtitle and dub translations","Cultural differences and best practices"],"x-skills-preferred":["Localization QC","Audiovisual translation and subtitling","Template linguistics"],"datePosted":"2026-03-09T10:55:36.882Z","jobLocationType":"TELECOMMUTE","occupationalCategory":"Engineering","industry":"Technology","skills":"Subtitle editing software, Web/cloud technology, Closed captioning and subtitling, Subtitle and dub translations, Cultural differences and best practices, Localization QC, Audiovisual translation and subtitling, Template linguistics"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_8a0f394e-d0f"},"title":"Expert Subtitle Translator/QCer: English to Basque","description":"<p>We are seeking a freelance translator with proven experience in subtitling for the TV and movie industry to join our global localization network. 
As an Expert Subtitle Translator/QCer, you will work on adapting blockbuster game franchises and ensuring accessibility for film and TV audiences.</p>\n<p><strong>Responsibilities</strong></p>\n<ul>\n<li>Translate subtitles from English to Basque with high accuracy and attention to detail</li>\n<li>Collaborate with our localization teams to deliver high-impact experiences through expert translation, subtitling, dubbing, voice-over, and linguistic quality assurance services</li>\n<li>Work on popular titles across film, TV, streaming, games, and much more</li>\n<li>Adapt to workflow/process changes and updates quickly</li>\n<li>Provide constructive feedback and support to our teams</li>\n</ul>\n<p><strong>Requirements</strong></p>\n<ul>\n<li>Native language fluency of Basque</li>\n<li>Extensive knowledge of English</li>\n<li>University degree or equivalent professional experience in the translation field</li>\n<li>Experience in at least one of the following areas of the entertainment industry: localization QC, audiovisual translation, and subtitling</li>\n<li>Deep understanding of nuances of subtitle and dub translations</li>\n<li>Working knowledge of cultural differences and best practices for subtitles and dub audio creation</li>\n<li>Understanding of closed captioning and subtitling, and their common failures and technical challenges</li>\n<li>Confidence and experience with subtitle editing software and web/cloud technology</li>\n</ul>\n<p><strong>Benefits</strong></p>\n<ul>\n<li>Competitive pay</li>\n<li>Early access to unreleased content</li>\n<li>Flexible project volume</li>\n<li>100% remote work</li>\n<li>Set your own schedule</li>\n<li>Constructive feedback and support</li>\n</ul>\n<p><strong>Our Diversity, Equity, Inclusion, and Belonging (DEIB) Commitment</strong></p>\n<p>Keywords Studios is an Equal Opportunity Employer and considers applicants for all positions without regard to race, ethnicity, religion or belief, sex, age, national origin, marital 
status, sexual orientation, gender identity, disability, or any other characteristic protected by applicable laws.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_8a0f394e-d0f","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Keywords Studios","sameAs":"https://apply.workable.com","logo":"https://logos.yubhub.co/j.com.png"},"x-apply-url":"https://apply.workable.com/j/70B3DA7C08","x-work-arrangement":"remote","x-experience-level":null,"x-job-type":"contract","x-salary-range":null,"x-skills-required":["Basque","English","Subtitle editing software","Web/cloud technology","Localization QC","Audiovisual translation","Subtitling"],"x-skills-preferred":["Subtitle editing software","Web/cloud technology"],"datePosted":"2026-03-09T10:54:38.622Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Basque Country, Spain"}},"jobLocationType":"TELECOMMUTE","employmentType":"CONTRACTOR","occupationalCategory":"Media & Entertainment","industry":"Entertainment","skills":"Basque, English, Subtitle editing software, Web/cloud technology, Localization QC, Audiovisual translation, Subtitling, Subtitle editing software, Web/cloud technology"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_5de0016f-87f"},"title":"Expert Subtitle Translator/QCer English to Tagalog | Filipino","description":"<p>We are looking for expert subtitling freelance translators with proven experience in subtitling for the TV and movie industry from English into Filipino/Tagalog. 
If you would like to pursue freelance translation opportunities with us, please send us your CV in English, providing detailed information regarding your subtitling experience.</p>\n<p>We are currently seeking a freelance translator with the following qualifications:</p>\n<p><strong>Requirements</strong></p>\n<ul>\n<li>Native language fluency of target language</li>\n<li>Extensive knowledge of source language</li>\n<li>University degree or equivalent professional experience in the translation field</li>\n<li>Experience in at least one of the following areas of the entertainment industry: localization QC, audiovisual translation, and subtitling</li>\n<li>Deep understanding of nuances of subtitle and dub translations</li>\n<li>Working knowledge of cultural differences and best practices for subtitles and dub audio creation</li>\n<li>Understanding of closed captioning and subtitling, and their common failures and technical challenges</li>\n<li>Confidence and experience with subtitle editing software and web/cloud technology</li>\n</ul>\n<p><strong>Benefits</strong></p>\n<ul>\n<li>Competitive pay</li>\n<li>Work on popular titles across film, TV, streaming, games, and much more</li>\n<li>Early access to unreleased content</li>\n<li>Flexible project volume</li>\n<li>100% remote work</li>\n<li>Set your own schedule</li>\n<li>Constructive feedback and support</li>\n</ul>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_5de0016f-87f","directApply":true,"hiringOrganization":{"@type":"Organization","name":"Keywords Studios Los Angeles","sameAs":"https://apply.workable.com","logo":"https://logos.yubhub.co/j.com.png"},"x-apply-url":"https://apply.workable.com/j/1B59FE5260","x-work-arrangement":"remote","x-experience-level":"senior","x-job-type":"contract","x-salary-range":null,"x-skills-required":["subtitle editing software","web/cloud technology","audiovisual 
translation","subtitling","closed captioning"],"x-skills-preferred":["subtitle editing software","web/cloud technology"],"datePosted":"2026-03-09T10:54:31.608Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"Metro Manila, Philippines"}},"jobLocationType":"TELECOMMUTE","employmentType":"CONTRACTOR","occupationalCategory":"Media & Entertainment","industry":"Media & Entertainment","skills":"subtitle editing software, web/cloud technology, audiovisual translation, subtitling, closed captioning, subtitle editing software, web/cloud technology"},{"@context":"https://schema.org","@type":"JobPosting","identifier":{"@type":"PropertyValue","name":"YubHub","value":"job_939b991c-cd9"},"title":"Strategic Finance, Compute Lead","description":"<p><strong>Location</strong></p>\n<p>San Francisco</p>\n<p><strong>Employment Type</strong></p>\n<p>Full time</p>\n<p><strong>Department</strong></p>\n<p>Strategic Finance</p>\n<p><strong>Compensation</strong></p>\n<ul>\n<li>$185K – $260K • Offers Equity</li>\n</ul>\n<p>The base pay offered may vary depending on multiple individualized factors, including market location, job-related knowledge, skills, and experience. If the role is non-exempt, overtime pay will be provided consistent with applicable laws. 
In addition to the salary range listed above, total compensation also includes generous equity, performance-related bonus(es) for eligible employees, and the following benefits.</p>\n<ul>\n<li>Medical, dental, and vision insurance for you and your family, with employer contributions to Health Savings Accounts</li>\n</ul>\n<ul>\n<li>Pre-tax accounts for Health FSA, Dependent Care FSA, and commuter expenses (parking and transit)</li>\n</ul>\n<ul>\n<li>401(k) retirement plan with employer match</li>\n</ul>\n<ul>\n<li>Paid parental leave (up to 24 weeks for birth parents and 20 weeks for non-birthing parents), plus paid medical and caregiver leave (up to 8 weeks)</li>\n</ul>\n<ul>\n<li>Paid time off: flexible PTO for exempt employees and up to 15 days annually for non-exempt employees</li>\n</ul>\n<ul>\n<li>13+ paid company holidays, and multiple paid coordinated company office closures throughout the year for focus and recharge, plus paid sick or safe time (1 hour per 30 hours worked, or more, as required by applicable state or local law)</li>\n</ul>\n<ul>\n<li>Mental health and wellness support</li>\n</ul>\n<ul>\n<li>Employer-paid basic life and disability coverage</li>\n</ul>\n<ul>\n<li>Annual learning and development stipend to fuel your professional growth</li>\n</ul>\n<ul>\n<li>Daily meals in our offices, and meal delivery credits as eligible</li>\n</ul>\n<ul>\n<li>Relocation support for eligible employees</li>\n</ul>\n<ul>\n<li>Additional taxable fringe benefits, such as charitable donation matching and wellness stipends, may also be provided.</li>\n</ul>\n<p>More details about our benefits are available to candidates during the hiring process.</p>\n<p>This role is at-will and OpenAI reserves the right to modify base pay and other compensation components at any time based on individual performance, team or company results, or market conditions.</p>\n<p><strong>About the Team</strong></p>\n<p>The Compute &amp; Infrastructure Strategy team handles strategy and 
execution of OpenAI’s compute roadmap. This team’s key responsibilities span financial analysis &amp; reporting, capacity planning, commercial and business development, and strategic partnerships. We partner across the business to allocate and deploy our resources for the highest impact outcomes.</p>\n<p><strong>About the Role</strong></p>\n<p>Compute is a key lever for OpenAI and AI progress. We are seeking a Strategic Finance Compute Lead to provide finance leadership for our compute and infrastructure spend and play a significant role in shaping our long-term compute strategy. You will play a critical role developing financial models for all areas of compute, analyzing spend patterns, and providing critical insights to optimize and plan for our future compute needs. This role will be a key partner to our scaling and supercomputing engineering teams providing financial expertise and guidance to optimize our capacity investments and drive strategic decision-making, while collaborating with other members of the finance organization to align our compute strategy with broader financial considerations.</p>\n<p>This role is based in San Francisco, CA. 
We use a hybrid work model of 3 days in the office per week and offer relocation assistance to new employees.</p>\n<p><strong>In this role, you will:</strong></p>\n<ul>\n<li>Own and develop financial models across different elements of compute (GPUs, CPUs, storage and networking)</li>\n<li>Lead strategic financial analysis for long-term capacity initiatives, working closely with scaling and supercomputing engineering teams</li>\n<li>Maintain deep expertise on compute contract terms, pricing structures and optimization opportunities</li>\n<li>Serve as a partner to FP&amp;A and strategic finance teams, aligning compute and infrastructure with broader financial and business strategies</li>\n<li>Create high-quality Exec and Board-facing presentations</li>\n<li>Stay abreast of market trends and competitive dynamics to inform and improve our infrastructure strategy</li>\n</ul>\n<p><strong>You might thrive in this role if you have:</strong></p>\n<ul>\n<li>5+ years of experience across strategic finance, private / growth equity, investment banking, strategy &amp; operations, and/or business development with 3+ years of finance operating experience at a high-growth technology company</li>\n<li>Experience partnering with engineering and product teams to provide financial analysis and insights to critical strategic decisions</li>\n<li>Good understanding of cloud technology and compute infrastructure</li>\n<li>Exceptionally strong analytical, financial modeling, and written and oral communication skills</li>\n<li>Demonstrated track record of thoughtful investment decisions</li>\n<li>Experience driving operational outcomes under ambitious deadlines</li>\n<li>Exceptionally strong relationship building, business judgment, and communication skills</li>\n<li>Bachelor’s degree or equivalent practical experience</li>\n</ul>\n<p><strong>About OpenAI</strong></p>\n<p>OpenAI is an AI research and deployment company dedicated to ensuring that general-purpose artificial intelligence 
benefits all of humanity. We push the boundaries of the capabilities of AI systems and seek to safely deploy them to the world through our products. AI is an extremely powerful tool that must be created with safety and human needs at its core, and to achieve our mission, we must encompass and value the many different perspectives, voices, and experiences that form the full spectrum of humanity.</p>\n<p style=\"margin-top:24px;font-size:13px;color:#666;\">XML job scraping automation by <a href=\"https://yubhub.co\">YubHub</a></p>","url":"https://yubhub.co/jobs/job_939b991c-cd9","directApply":true,"hiringOrganization":{"@type":"Organization","name":"OpenAI","sameAs":"https://jobs.ashbyhq.com","logo":"https://logos.yubhub.co/openai.com.png"},"x-apply-url":"https://jobs.ashbyhq.com/openai/b4196174-9cc3-487d-9d21-848bc283b80f","x-work-arrangement":"hybrid","x-experience-level":"senior","x-job-type":"full-time","x-salary-range":"$185K – $260K","x-skills-required":["strategic finance","private / growth equity","investment banking","strategy & operations","business development","financial modeling","cloud technology","compute infrastructure","relationship building","business judgment","communication skills"],"x-skills-preferred":["data analysis","data visualization","financial planning","budgeting","forecasting","financial reporting","accounting","auditing","taxation","financial regulations"],"datePosted":"2026-03-06T18:32:49.111Z","jobLocation":{"@type":"Place","address":{"@type":"PostalAddress","addressLocality":"San Francisco"}},"employmentType":"FULL_TIME","occupationalCategory":"Finance","industry":"Technology","skills":"strategic finance, private / growth equity, investment banking, strategy & operations, business development, financial modeling, cloud technology, compute infrastructure, relationship building, business judgment, communication skills, data analysis, data visualization, financial planning, budgeting, forecasting, financial reporting, accounting, 
auditing, taxation, financial regulations","baseSalary":{"@type":"MonetaryAmount","currency":"USD","value":{"@type":"QuantitativeValue","minValue":185000,"maxValue":260000,"unitText":"YEAR"}}}]}