무회blog

python: pytorch, transformer, pipeline, 提问解答 사용해보기 본문

Python/TA

python: pytorch, transformer, pipeline, 提问解答 사용해보기

최무회 2020. 8. 13. 18:18

 

 

 

In [6]:
# Set up the demo environment for the transformers `pipeline` examples below.
from __future__ import print_function
# NOTE(review): ipywidgets is imported but never used in the snippets that
# follow — presumably kept for interactive notebook widgets; verify.
import ipywidgets as widgets
from transformers import pipeline
# Confirm the imports succeeded before running the heavier pipeline cells.
print('success')
 
success
In [7]:
# Build a sentiment-analysis pipeline; the default pretrained model is
# downloaded on first use, so this cell needs network access.
nlp_sentence_classif = pipeline('sentiment-analysis')

# Classify a sample sentence; returns a list of {label, score} dicts.
sample_sentence = 'Such a nice weather outside !'
nlp_sentence_classif(sample_sentence)
 
 
 
 
 
 
 
 
 
 
 
 
Out[7]:
[{'label': 'POSITIVE', 'score': 0.9997655749320984}]
In [12]:
# Named-entity recognition pipeline (default pretrained model, downloaded
# on first use — requires network access).
nlp_token_class = pipeline('ner')

# Tag each recognized token; output is a list of
# {word, score, entity, index} dicts (sub-word tokens prefixed with '##').
ner_sentence = 'Hugging Face is a French company based in New-York.'
nlp_token_class(ner_sentence)
Out[12]:
[{'word': 'Hu', 'score': 0.9970937967300415, 'entity': 'I-ORG', 'index': 1},
 {'word': '##gging',
  'score': 0.9345751404762268,
  'entity': 'I-ORG',
  'index': 2},
 {'word': 'Face', 'score': 0.9787060618400574, 'entity': 'I-ORG', 'index': 3},
 {'word': 'French',
  'score': 0.9981995820999146,
  'entity': 'I-MISC',
  'index': 6},
 {'word': 'New', 'score': 0.9983047246932983, 'entity': 'I-LOC', 'index': 10},
 {'word': '-', 'score': 0.8913456797599792, 'entity': 'I-LOC', 'index': 11},
 {'word': 'York', 'score': 0.9979523420333862, 'entity': 'I-LOC', 'index': 12}]
In [13]:
# Extractive question-answering pipeline (default pretrained model,
# downloaded on first use — requires network access).
nlp_qa = pipeline('question-answering')

# Ask where the company is based; the answer is a span extracted from
# the context, returned as {score, start, end, answer}.
qa_context = 'Hugging Face is a French company based in New-York.'
qa_question = 'Where is based Hugging Face ?'
nlp_qa(question=qa_question, context=qa_context)
 
 
 
 
 
 
 
 
 
 
 
 
Out[13]:
{'score': 0.9628767777442973, 'start': 42, 'end': 50, 'answer': 'New-York.'}
In [16]:
# Introspect the `pipeline` factory function's attributes (dunder methods
# of a plain Python function — see the Out[16] listing below).
dir(pipeline)
Out[16]:
['__annotations__',
 '__call__',
 '__class__',
 '__closure__',
 '__code__',
 '__defaults__',
 '__delattr__',
 '__dict__',
 '__dir__',
 '__doc__',
 '__eq__',
 '__format__',
 '__ge__',
 '__get__',
 '__getattribute__',
 '__globals__',
 '__gt__',
 '__hash__',
 '__init__',
 '__init_subclass__',
 '__kwdefaults__',
 '__le__',
 '__lt__',
 '__module__',
 '__name__',
 '__ne__',
 '__new__',
 '__qualname__',
 '__reduce__',
 '__reduce_ex__',
 '__repr__',
 '__setattr__',
 '__sizeof__',
 '__str__',
 '__subclasshook__']
In [ ]:
from transformers import BertForSequenceClassification
# Load pretrained BERT weights with a sequence-classification head;
# the checkpoint is downloaded on first use (network access required).
model = BertForSequenceClassification.from_pretrained('bert-base-uncased')
# NOTE: .train() only switches the module into training mode (enabling
# dropout etc.) and returns the module — it does NOT run any training loop.
model.train()
Comments