AL
AlfredPros: CodeLLaMa 7B Instruct Solidity
A fine-tuned 7-billion-parameter CodeLlama-Instruct model that generates Solidity smart contracts, trained with 4-bit QLoRA fine-tuning via the PEFT library.
Modalities: text · code
Specifications
| Property | Value |
| --- | --- |
| Context Window | 4K |
| Modalities | text, code |
| Status | available |
| Category | third-party |
| Model ID | alfredpros/codellama-7b-instruct-solidity |
Quick Start
TypeScript
import OpenAI from 'openai'
const client = new OpenAI({
apiKey: process.env.HANZO_API_KEY,
baseURL: 'https://api.hanzo.ai/v1'
})
const response = await client.chat.completions.create({
model: 'alfredpros/codellama-7b-instruct-solidity',
messages: [{ role: 'user', content: 'Hello!' }]
})
console.log(response.choices[0].message.content)

Python
from openai import OpenAI
client = OpenAI(
api_key=os.environ["HANZO_API_KEY"],
base_url="https://api.hanzo.ai/v1"
)
response = client.chat.completions.create(
model="alfredpros/codellama-7b-instruct-solidity",
messages=[{"role": "user", "content": "Hello!"}]
)
print(response.choices[0].message.content)

cURL
curl https://api.hanzo.ai/v1/chat/completions \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $HANZO_API_KEY" \
-d '{
"model": "alfredpros/codellama-7b-instruct-solidity",
"messages": [{"role": "user", "content": "Hello!"}]
}'

Go
package main
import (
"context"
"fmt"
"os"
"github.com/sashabaranov/go-openai"
)
func main() {
cfg := openai.DefaultConfig(os.Getenv("HANZO_API_KEY"))
cfg.BaseURL = "https://api.hanzo.ai/v1"
client := openai.NewClientWithConfig(cfg)
resp, _ := client.CreateChatCompletion(context.Background(),
openai.ChatCompletionRequest{
Model: "alfredpros/codellama-7b-instruct-solidity",
Messages: []openai.ChatCompletionMessage{
{Role: openai.ChatMessageRoleUser, Content: "Hello!"},
},
},
)
fmt.Println(resp.Choices[0].Message.Content)
}

Use AlfredPros: CodeLLaMa 7B Instruct Solidity via Hanzo AI
One API key. 390+ models. OpenAI-compatible. Start free.