
Commit

fixes
aniketmaurya committed Jan 18, 2024
1 parent 682be42 commit dc7adab
Showing 2 changed files with 2 additions and 2 deletions.
src/fastserve/models/__main__.py (1 change: 1 addition & 0 deletions)
@@ -46,6 +46,7 @@

 if args.model == "ssd-1b":
     app = ServeSSD1B(device=device, timeout=args.timeout, batch_size=args.batch_size)
+
 elif args.model == "sdxl-turbo":
     app = ServeSDXLTurbo(
         device=device, timeout=args.timeout, batch_size=args.batch_size
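For context, the hunk above sits inside an argparse-based model dispatch. The sketch below illustrates that pattern end to end; the flag names, defaults, and import path are assumptions for illustration and are not taken from this commit. Only the if/elif branches mirror the diff.

# Hypothetical sketch of the dispatch pattern around the hunk above.
# Flag names, defaults, and the import path are assumed, not from this commit.
import argparse

from fastserve.models import ServeSDXLTurbo, ServeSSD1B  # assumed import path

parser = argparse.ArgumentParser()
parser.add_argument("--model", type=str, required=True)
parser.add_argument("--device", type=str, default="cuda")
parser.add_argument("--timeout", type=float, default=1.0)
parser.add_argument("--batch_size", type=int, default=1)
args = parser.parse_args()
device = args.device

if args.model == "ssd-1b":
    app = ServeSSD1B(device=device, timeout=args.timeout, batch_size=args.batch_size)

elif args.model == "sdxl-turbo":
    app = ServeSDXLTurbo(
        device=device, timeout=args.timeout, batch_size=args.batch_size
    )
else:
    raise ValueError(f"Unknown model: {args.model}")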
src/fastserve/models/sdxl_turbo.py (3 changes: 1 addition & 2 deletions)
@@ -25,12 +25,11 @@ def __init__(
"It is recommended to use inference_steps=1 for SDXL Turbo model."
)
self.num_inference_steps = num_inference_steps
self.input_schema = PromptRequest
self.pipe = AutoPipelineForText2Image.from_pretrained(
"stabilityai/sdxl-turbo", torch_dtype=torch.float16, variant="fp16"
)
self.pipe.to(device)
super().__init__(batch_size, timeout)
super().__init__(batch_size, timeout, input_schema=PromptRequest)

def handle(self, batch: List[PromptRequest]) -> List[StreamingResponse]:
prompts = [b.prompt for b in batch]
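The sdxl_turbo.py change replaces a post-init attribute assignment (self.input_schema = PromptRequest) with a keyword argument to super().__init__, which implies the base serving class accepts input_schema in its constructor. Below is a minimal sketch of what that signature could look like; the class name FastServe, the pydantic PromptRequest model, and the defaults are assumptions, not code from this repository.

# Hypothetical sketch only: a base class whose __init__ accepts input_schema,
# matching the super().__init__(batch_size, timeout, input_schema=PromptRequest)
# call introduced above. Names and defaults are assumed for illustration.
from typing import List, Optional, Type

from pydantic import BaseModel


class PromptRequest(BaseModel):  # assumed request schema
    prompt: str


class FastServe:  # assumed base-class name
    def __init__(
        self,
        batch_size: int = 1,
        timeout: float = 0.0,
        input_schema: Optional[Type[BaseModel]] = None,
    ):
        self.batch_size = batch_size
        self.timeout = timeout
        # Accepting the schema here lets subclasses pass it through super().__init__()
        # instead of assigning self.input_schema separately before calling the base.
        self.input_schema = input_schema or PromptRequest

    def handle(self, batch: List[BaseModel]):
        raise NotImplementedError

Routing the schema through the constructor keeps request-schema registration in one place and removes the ordering dependency between the attribute assignment and the base-class setup.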

0 comments on commit dc7adab
