Request
# POST the files to extract plus the field schema describing the desired output.
curl -X POST https://api.iterationlayer.com/document-extraction/v1/extract \
  -H "Authorization: Bearer YOUR_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "files": [
      {
        "type": "url",
        "name": "paper.pdf",
        "url": "https://example.com/papers/research-paper.pdf"
      }
    ],
    "schema": {
      "fields": [
        {
          "name": "title",
          "type": "TEXT",
          "description": "Title of the academic paper"
        },
        {
          "name": "authors",
          "type": "ARRAY",
          "description": "List of paper authors",
          "fields": [
            {
              "name": "name",
              "type": "TEXT",
              "description": "Full name of the author"
            }
          ]
        },
        {
          "name": "abstract",
          "type": "TEXTAREA",
          "description": "Paper abstract"
        },
        {
          "name": "published_date",
          "type": "DATE",
          "description": "Publication date of the paper"
        },
        {
          "name": "keywords",
          "type": "ARRAY",
          "description": "Subject keywords or tags",
          "fields": [
            {
              "name": "keyword",
              "type": "TEXT",
              "description": "A keyword or topic tag"
            }
          ]
        }
      ]
    }
  }'
Response
{
"success": true,
"data": {
"title": {
"value": "Attention Is All You Need",
"confidence": 0.99,
"citations": [
"Attention Is All You Need"
]
},
"authors": {
"value": [
{
"name": {
"value": "Ashish Vaswani",
"confidence": 0.98,
"citations": ["Ashish Vaswani"]
}
},
{
"name": {
"value": "Noam Shazeer",
"confidence": 0.97,
"citations": ["Noam Shazeer"]
}
}
],
"confidence": 0.97,
"citations": []
},
"abstract": {
"value": "The dominant sequence transduction models are based on complex recurrent or convolutional neural networks. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms.",
"confidence": 0.95,
"citations": [
"The dominant sequence transduction models are based on complex recurrent or convolutional neural networks"
]
},
"published_date": {
"value": "2017-06-12",
"confidence": 0.94,
"citations": ["12 Jun 2017"]
},
"keywords": {
"value": [
{
"keyword": {
"value": "transformer",
"confidence": 0.96,
"citations": ["Transformer"]
}
},
{
"keyword": {
"value": "attention mechanism",
"confidence": 0.95,
"citations": [
"attention mechanisms"
]
}
}
],
"confidence": 0.95,
"citations": []
}
}
}
Request
// Initialize the SDK client with your API key, then run a schema-driven
// extraction against a remote PDF. `extract` resolves with per-field values,
// confidences, and citations (see the Response example below).
import { IterationLayer } from "iterationlayer";

const client = new IterationLayer({ apiKey: "YOUR_API_KEY" });

const result = await client.extract({
  // Documents to process; here a single PDF referenced by URL.
  files: [
    {
      type: "url",
      name: "paper.pdf",
      url: "https://example.com/papers/research-paper.pdf",
    },
  ],
  // Field schema describing the structured output to extract.
  schema: {
    fields: [
      {
        name: "title",
        type: "TEXT",
        description: "Title of the academic paper",
      },
      {
        name: "authors",
        type: "ARRAY",
        description: "List of paper authors",
        // Sub-fields describe the shape of each array element.
        fields: [
          {
            name: "name",
            type: "TEXT",
            description: "Full name of the author",
          },
        ],
      },
      {
        name: "abstract",
        type: "TEXTAREA",
        description: "Paper abstract",
      },
      {
        name: "published_date",
        type: "DATE",
        description: "Publication date of the paper",
      },
      {
        name: "keywords",
        type: "ARRAY",
        description: "Subject keywords or tags",
        fields: [
          {
            name: "keyword",
            type: "TEXT",
            description: "A keyword or topic tag",
          },
        ],
      },
    ],
  },
});

console.log(result);
Response
{
"success": true,
"data": {
"title": {
"value": "Attention Is All You Need",
"confidence": 0.99,
"citations": [
"Attention Is All You Need"
]
},
"authors": {
"value": [
{
"name": {
"value": "Ashish Vaswani",
"confidence": 0.98,
"citations": ["Ashish Vaswani"]
}
},
{
"name": {
"value": "Noam Shazeer",
"confidence": 0.97,
"citations": ["Noam Shazeer"]
}
}
],
"confidence": 0.97,
"citations": []
},
"abstract": {
"value": "The dominant sequence transduction models are based on complex recurrent or convolutional neural networks. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms.",
"confidence": 0.95,
"citations": [
"The dominant sequence transduction models are based on complex recurrent or convolutional neural networks"
]
},
"published_date": {
"value": "2017-06-12",
"confidence": 0.94,
"citations": ["12 Jun 2017"]
},
"keywords": {
"value": [
{
"keyword": {
"value": "transformer",
"confidence": 0.96,
"citations": ["Transformer"]
}
},
{
"keyword": {
"value": "attention mechanism",
"confidence": 0.95,
"citations": [
"attention mechanisms"
]
}
}
],
"confidence": 0.95,
"citations": []
}
}
}
Request
# Initialize the SDK client with your API key, then run a schema-driven
# extraction against a remote PDF. `extract` returns per-field values,
# confidences, and citations (see the Response example below).
from iterationlayer import IterationLayer

client = IterationLayer(api_key="YOUR_API_KEY")

result = client.extract(
    # Documents to process; here a single PDF referenced by URL.
    files=[
        {
            "type": "url",
            "name": "paper.pdf",
            "url": "https://example.com/papers/research-paper.pdf",
        }
    ],
    # Field schema describing the structured output to extract.
    schema={
        "fields": [
            {
                "name": "title",
                "type": "TEXT",
                "description": "Title of the academic paper",
            },
            {
                "name": "authors",
                "type": "ARRAY",
                "description": "List of paper authors",
                # Sub-fields describe the shape of each array element.
                "fields": [
                    {
                        "name": "name",
                        "type": "TEXT",
                        "description": "Full name of the author",
                    },
                ],
            },
            {
                "name": "abstract",
                "type": "TEXTAREA",
                "description": "Paper abstract",
            },
            {
                "name": "published_date",
                "type": "DATE",
                "description": "Publication date of the paper",
            },
            {
                "name": "keywords",
                "type": "ARRAY",
                "description": "Subject keywords or tags",
                "fields": [
                    {
                        "name": "keyword",
                        "type": "TEXT",
                        "description": "A keyword or topic tag",
                    },
                ],
            },
        ]
    },
)

print(result)
Response
{
"success": true,
"data": {
"title": {
"value": "Attention Is All You Need",
"confidence": 0.99,
"citations": [
"Attention Is All You Need"
]
},
"authors": {
"value": [
{
"name": {
"value": "Ashish Vaswani",
"confidence": 0.98,
"citations": ["Ashish Vaswani"]
}
},
{
"name": {
"value": "Noam Shazeer",
"confidence": 0.97,
"citations": ["Noam Shazeer"]
}
}
],
"confidence": 0.97,
"citations": []
},
"abstract": {
"value": "The dominant sequence transduction models are based on complex recurrent or convolutional neural networks. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms.",
"confidence": 0.95,
"citations": [
"The dominant sequence transduction models are based on complex recurrent or convolutional neural networks"
]
},
"published_date": {
"value": "2017-06-12",
"confidence": 0.94,
"citations": ["12 Jun 2017"]
},
"keywords": {
"value": [
{
"keyword": {
"value": "transformer",
"confidence": 0.96,
"citations": ["Transformer"]
}
},
{
"keyword": {
"value": "attention mechanism",
"confidence": 0.95,
"citations": [
"attention mechanisms"
]
}
}
],
"confidence": 0.95,
"citations": []
}
}
}
Request
package main
import (
"fmt"
il "github.com/iterationlayer/sdk-go"
)
func main() {
client := il.NewClient("YOUR_API_KEY")
result, err := client.Extract(il.ExtractRequest{
Files: []il.FileInput{
il.NewFileFromURL("paper.pdf", "https://example.com/papers/research-paper.pdf"),
},
Schema: il.ExtractionSchema{
"title": il.NewTextFieldConfig(
"title",
"Title of the academic paper",
),
"authors": il.NewArrayFieldConfig(
"authors",
"List of paper authors",
il.ExtractionSchema{
"name": il.NewTextFieldConfig(
"name",
"Full name of the author",
),
},
),
"abstract": il.NewTextFieldConfig(
"abstract", "Paper abstract",
),
"published_date": il.NewDateFieldConfig(
"published_date",
"Publication date of the paper",
),
"keywords": il.NewArrayFieldConfig(
"keywords",
"Subject keywords or tags",
il.ExtractionSchema{
"keyword": il.NewTextFieldConfig(
"keyword",
"A keyword or topic tag",
),
},
),
},
})
if err != nil {
panic(err)
}
fmt.Println(result)
}Response
{
"success": true,
"data": {
"title": {
"value": "Attention Is All You Need",
"confidence": 0.99,
"citations": [
"Attention Is All You Need"
]
},
"authors": {
"value": [
{
"name": {
"value": "Ashish Vaswani",
"confidence": 0.98,
"citations": ["Ashish Vaswani"]
}
},
{
"name": {
"value": "Noam Shazeer",
"confidence": 0.97,
"citations": ["Noam Shazeer"]
}
}
],
"confidence": 0.97,
"citations": []
},
"abstract": {
"value": "The dominant sequence transduction models are based on complex recurrent or convolutional neural networks. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms.",
"confidence": 0.95,
"citations": [
"The dominant sequence transduction models are based on complex recurrent or convolutional neural networks"
]
},
"published_date": {
"value": "2017-06-12",
"confidence": 0.94,
"citations": ["12 Jun 2017"]
},
"keywords": {
"value": [
{
"keyword": {
"value": "transformer",
"confidence": 0.96,
"citations": ["Transformer"]
}
},
{
"keyword": {
"value": "attention mechanism",
"confidence": 0.95,
"citations": [
"attention mechanisms"
]
}
}
],
"confidence": 0.95,
"citations": []
}
}
}