# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "transformers",
#     "torch",
# ]
# ///
"""Quick demo of the openbmb/MiniCPM4-MCP model via Hugging Face transformers.

Runs a text-generation pipeline on a single chat message, then also loads the
raw model object directly. NOTE: the original file had been collapsed onto one
line starting with '#', which turned the entire script into a comment — nothing
ever executed. This restores the intended multi-line PEP 723 script.
"""

# Use a pipeline as a high-level helper
from transformers import pipeline

# trust_remote_code=True is required because MiniCPM4 ships custom modeling
# code in the model repo. Be aware this executes code downloaded from the hub —
# only enable it for repositories you trust.
pipe = pipeline(
    "text-generation",
    model="openbmb/MiniCPM4-MCP",
    trust_remote_code=True,
)

messages = [
    {"role": "user", "content": "Who are you?"},
]

# Print the generation result; discarding it (as the original did) makes the
# inference call a no-op.
print(pipe(messages))

# Load model directly
from transformers import AutoModel

# NOTE(review): this loads the same weights a second time on top of the
# pipeline above, roughly doubling memory use. Kept for parity with the
# model-card snippet; in real code, use either the pipeline or the raw model,
# not both.
model = AutoModel.from_pretrained("openbmb/MiniCPM4-MCP", trust_remote_code=True)